<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 03:25:28 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92">
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>
    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-16264] assertion in osp_precreate_send req-&gt;rq_transno == 0</title>
                <link>https://jira.whamcloud.com/browse/LU-16264</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;There&apos;s a regular crash in conf-sanity test 30a (janitor) or 30b (maloo) that unfolds like this &lt;a href=&quot;https://testing.whamcloud.com/test_sets/683c15ea-ec59-46d0-89c1-7f10d715f355&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.whamcloud.com/test_sets/683c15ea-ec59-46d0-89c1-7f10d715f355&lt;/a&gt;&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;[ 5673.591855] Lustre: DEBUG MARKER: /usr/sbin/lctl mark onyx-60vm1.onyx.whamcloud.com: executing wait_import_state_mount FULL osc.lustre-OST0000-osc-[-0-9a-f]*.ost_server_uuid
[ 5673.592761] Lustre: DEBUG MARKER: /usr/sbin/lctl mark onyx-60vm2.onyx.whamcloud.com: executing wait_import_state_mount FULL osc.lustre-OST0000-osc-[-0-9a-f]*.ost_server_uuid
[ 5673.882066] Lustre: DEBUG MARKER: onyx-60vm1.onyx.whamcloud.com: executing wait_import_state_mount FULL osc.lustre-OST0000-osc-[-0-9a-f]*.ost_server_uuid
[ 5673.948710] Lustre: DEBUG MARKER: onyx-60vm2.onyx.whamcloud.com: executing wait_import_state_mount FULL osc.lustre-OST0000-osc-[-0-9a-f]*.ost_server_uuid
[ 5679.785437] Lustre: DEBUG MARKER: /usr/sbin/lctl conf_param lustre-OST0000.failover.node=&apos;10.240.25.54@tcp&apos;
[ 5684.257444] Lustre: DEBUG MARKER: /usr/sbin/lctl conf_param -d lustre-OST0000.failover.node
[ 5686.387703] LustreError: 167-0: lustre-OST0000-osc-MDT0002: This client was evicted by lustre-OST0000; in progress operations using this service will fail.
[ 5686.390343] LustreError: Skipped 1 previous similar message
[ 5686.391768] Lustre: lustre-OST0000-osc-MDT0002: Connection restored to 10.240.25.54@tcp (at 10.240.25.54@tcp)
[ 5686.393594] Lustre: Skipped 1 previous similar message
[ 5686.576332] LustreError: 15573:0:(osp_precreate.c:684:osp_precreate_send()) ASSERTION( req-&amp;gt;rq_transno == 0 ) failed: 
[ 5686.578284] LustreError: 15573:0:(osp_precreate.c:684:osp_precreate_send()) LBUG
[ 5686.579611] Pid: 15573, comm: osp-pre-0-2 3.10.0-1160.76.1.el7_lustre.ddn17.x86_64 #1 SMP Fri Oct 14 20:32:07 UTC 2022
[ 5686.581497] Call Trace:
[ 5686.582054] [&amp;lt;0&amp;gt;] libcfs_call_trace+0x90/0xf0 [libcfs]
[ 5686.582998] [&amp;lt;0&amp;gt;] lbug_with_loc+0x4c/0xa0 [libcfs]
[ 5686.583926] [&amp;lt;0&amp;gt;] osp_precreate_send+0x1106/0x11b0 [osp]
[ 5686.584908] [&amp;lt;0&amp;gt;] osp_precreate_thread+0x6ba/0x13b0 [osp]
[ 5686.585956] [&amp;lt;0&amp;gt;] kthread+0xd1/0xe0
[ 5686.586645] [&amp;lt;0&amp;gt;] ret_from_fork_nospec_begin+0x21/0x21
[ 5686.587621] [&amp;lt;0&amp;gt;] 0xfffffffffffffffe
[ 5686.588300] Kernel panic - not syncing: LBUG
[ 5686.589082] CPU: 0 PID: 15573 Comm: osp-pre-0-2 Kdump: loaded Tainted: G           OE  ------------   3.10.0-1160.76.1.el7_lustre.ddn17.x86_64 #1
[ 5686.591324] Hardware name: Red Hat KVM, BIOS 0.5.1 01/01/2011
[ 5686.592332] Call Trace:
[ 5686.592803]  [&amp;lt;ffffffff875865c9&amp;gt;] dump_stack+0x19/0x1b
[ 5686.593723]  [&amp;lt;ffffffff875802d1&amp;gt;] panic+0xe8/0x21f
[ 5686.594578]  [&amp;lt;ffffffffc075ba1b&amp;gt;] lbug_with_loc+0x9b/0xa0 [libcfs]
[ 5686.595672]  [&amp;lt;ffffffffc12e1f56&amp;gt;] osp_precreate_send+0x1106/0x11b0 [osp]
[ 5686.596843]  [&amp;lt;ffffffffc12e464a&amp;gt;] osp_precreate_thread+0x6ba/0x13b0 [osp]
[ 5686.598027]  [&amp;lt;ffffffff86ec7080&amp;gt;] ? wake_up_atomic_t+0x30/0x30
[ 5686.599052]  [&amp;lt;ffffffffc12e3f90&amp;gt;] ? osp_init_pre_fid+0x630/0x630 [osp]
[ 5686.600244]  [&amp;lt;ffffffff86ec5f91&amp;gt;] kthread+0xd1/0xe0
[ 5686.601114]  [&amp;lt;ffffffff86ec5ec0&amp;gt;] ? insert_kthread_work+0x40/0x40
[ 5686.602190]  [&amp;lt;ffffffff87599df7&amp;gt;] ret_from_fork_nospec_begin+0x21/0x21
[ 5686.603335]  [&amp;lt;ffffffff86ec5ec0&amp;gt;] ? insert_kthread_work+0x40/0x40 &lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;
&lt;p&gt;or &lt;a href=&quot;http://testing.linuxhacker.ru/lustre-reports/25993/testresults/conf-sanity1-ldiskfs-DNE-centos7_x86_64-centos7_x86_64/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://testing.linuxhacker.ru/lustre-reports/25993/testresults/conf-sanity1-ldiskfs-DNE-centos7_x86_64-centos7_x86_64/&lt;/a&gt;&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;[ 3227.316101] Lustre: Skipped 1 previous similar message
[ 3227.321723] Lustre: lustre-OST0000: Imperative Recovery not enabled, recovery window 60-180
[ 3228.406622] Lustre: DEBUG MARKER: oleg208-server.virtnet: executing set_default_debug -1 all 8
[ 3230.985273] Lustre: DEBUG MARKER: oleg208-client.virtnet: executing wait_import_state_mount FULL osc.lustre-OST0000-osc-[-0-9a-f]*.ost_server_uuid
[ 3231.624671] Lustre: lustre-OST0000: Connection restored to 50155b09-b14b-6aa1-56c4-bebc671f968e (at 192.168.201.8@tcp)
[ 3231.627520] Lustre: Skipped 3 previous similar messages
[ 3242.148194] Lustre: MGS: Connection restored to 50155b09-b14b-6aa1-56c4-bebc671f968e (at 192.168.201.8@tcp)
[ 3242.149935] Lustre: Skipped 2 previous similar messages
[ 3243.345638] LustreError: 11-0: lustre-OST0000-osc-MDT0000: operation ost_statfs to node 0@lo failed: rc = -107
[ 3243.351508] Lustre: lustre-OST0000-osc-MDT0000: Connection to lustre-OST0000 (at 0@lo) was lost; in progress operations using this service will wait for recovery to complete
[ 3243.361084] Lustre: lustre-OST0000: Not available for connect from 0@lo (stopping)
[ 3243.370317] LustreError: 167-0: lustre-OST0000-osc-MDT0000: This client was evicted by lustre-OST0000; in progress operations using this service will fail.
[ 3243.404559] LustreError: 10962:0:(osp_precreate.c:683:osp_precreate_send()) ASSERTION( req-&amp;gt;rq_transno == 0 ) failed: 
[ 3243.411037] LustreError: 10962:0:(osp_precreate.c:683:osp_precreate_send()) LBUG
[ 3243.414422] Pid: 10962, comm: osp-pre-0-1 3.10.0-7.9-debug #1 SMP Sat Mar 26 23:28:42 EDT 2022
[ 3243.418602] Call Trace:
[ 3243.419815]  [&amp;lt;ffffffffa018d7dc&amp;gt;] libcfs_call_trace+0x8c/0xc0 [libcfs]
[ 3243.422930]  [&amp;lt;ffffffffa018d88c&amp;gt;] lbug_with_loc+0x4c/0xa0 [libcfs]
[ 3243.425789]  [&amp;lt;ffffffffa0e7a8b6&amp;gt;] osp_precreate_send+0x11a6/0x11c0 [osp]
[ 3243.428811]  [&amp;lt;ffffffffa0e7c218&amp;gt;] osp_precreate_thread+0x818/0x1360 [osp]
[ 3243.431581]  [&amp;lt;ffffffff810ba114&amp;gt;] kthread+0xe4/0xf0
[ 3243.433857]  [&amp;lt;ffffffff817f3e5d&amp;gt;] ret_from_fork_nospec_begin+0x7/0x21
[ 3243.437050]  [&amp;lt;ffffffffffffffff&amp;gt;] 0xffffffffffffffff
[ 3243.439596] Kernel panic - not syncing: LBUG &lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;
&lt;p&gt;Happens on a variety of branches including b2_12, b2_15 and even master at times, and all the derivatives.&lt;/p&gt;</description>
                <environment></environment>
        <key id="72935">LU-16264</key>
            <summary>assertion in osp_precreate_send req-&gt;rq_transno == 0</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="3" iconUrl="https://jira.whamcloud.com/images/icons/priorities/major.svg">Major</priority>
                        <status id="4" iconUrl="https://jira.whamcloud.com/images/icons/statuses/reopened.png" description="This issue was once resolved, but the resolution was deemed incorrect. From here issues are either marked assigned or resolved.">Reopened</status>
                    <statusCategory id="2" key="new" colorName="default"/>
                                    <resolution id="-1">Unresolved</resolution>
                                        <assignee username="scherementsev">Sergey Cheremencev</assignee>
                                    <reporter username="green">Oleg Drokin</reporter>
                        <labels>
                    </labels>
                <created>Tue, 25 Oct 2022 17:54:10 +0000</created>
                <updated>Wed, 25 Oct 2023 13:22:18 +0000</updated>
                                            <version>Lustre 2.15.2</version>
                    <version>Lustre 2.15.3</version>
                                                        <due></due>
                            <votes>0</votes>
                                    <watches>10</watches>
                                                                            <comments>
                            <comment id="358075" author="mdiep" created="Fri, 6 Jan 2023 00:45:07 +0000"  >&lt;p&gt;+1 on 2.15.2 &lt;a href=&quot;https://testing.whamcloud.com/test_sets/0b143d9e-b9a4-423e-be85-5fb590667675&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.whamcloud.com/test_sets/0b143d9e-b9a4-423e-be85-5fb590667675&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="373512" author="ssmirnov" created="Thu, 25 May 2023 20:12:14 +0000"  >&lt;p&gt;+1 on b2_15: &lt;a href=&quot;https://testing.whamcloud.com/test_sets/83ce8d03-abd2-4bc7-b957-f651b84bc179&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.whamcloud.com/test_sets/83ce8d03-abd2-4bc7-b957-f651b84bc179&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="375947" author="eaujames" created="Tue, 20 Jun 2023 10:11:39 +0000"  >&lt;p&gt;+1 on 2.15.3-RC1: &lt;a href=&quot;https://testing.whamcloud.com/test_sets/179ae50b-8ea7-4be8-807f-16b73799b897&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.whamcloud.com/test_sets/179ae50b-8ea7-4be8-807f-16b73799b897&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="376004" author="adilger" created="Tue, 20 Jun 2023 17:37:30 +0000"  >&lt;p&gt;If this is being hit on b2_15 without master/2.16 servers, then it seems to be a different problem than &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-15671&quot; title=&quot;conf-sanity test_30b: MDS assertion in osp_precreate_send req-&amp;gt;rq_transno == 0&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-15671&quot;&gt;&lt;del&gt;LU-15671&lt;/del&gt;&lt;/a&gt;, which AFAIK was caused by the &quot;resend precreate RPCs&quot; patch and that was never landed to b2_15, so I don&apos;t think the patch &lt;a href=&quot;https://review.whamcloud.com/51056&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/51056&lt;/a&gt; &quot;&lt;tt&gt;&lt;a href=&quot;https://jira.whamcloud.com/browse/LU-15671&quot; title=&quot;conf-sanity test_30b: MDS assertion in osp_precreate_send req-&amp;gt;rq_transno == 0&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-15671&quot;&gt;&lt;del&gt;LU-15671&lt;/del&gt;&lt;/a&gt; mds: do not send OST_CREATE transno interop&lt;/tt&gt;&quot; will fix it. &lt;/p&gt;
</comment>
                            <comment id="390491" author="sergey" created="Wed, 25 Oct 2023 13:22:18 +0000"  >&lt;p&gt;Hi &lt;a href=&quot;https://jira.whamcloud.com/secure/ViewProfile.jspa?name=green&quot; class=&quot;user-hover&quot; rel=&quot;green&quot;&gt;green&lt;/a&gt; , do you still face this issue or we can close this ticket?&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10010">
                    <name>Duplicate</name>
                                            <outwardlinks description="duplicates">
                                        <issuelink>
            <issuekey id="69200">LU-15671</issuekey>
        </issuelink>
                            </outwardlinks>
                                                        </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|i033pz:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>