Actions
Bug #60635
opencrash: BlueStore::_txc_finish_io(BlueStore::TransContext*)
Status:
New
Priority:
Normal
Assignee:
-
Target version:
-
% Done:
0%
Source:
Telemetry
Tags:
Backport:
Regression:
No
Severity:
3 - minor
Reviewed:
Affected Versions:
ceph-qa-suite:
Pull request ID:
Crash signature (v1):
9393f6f0097cb9743e2efaf137ce7e3068de4aca1bd60d146d6f5ac6450aee38
Crash signature (v2):
Description
Sanitized backtrace:
BlueStore::_txc_finish_io(BlueStore::TransContext*)
BlueStore::queue_transactions(boost::intrusive_ptr<ObjectStore::CollectionImpl>&, std::vector<ceph::os::Transaction, std::allocator<ceph::os::Transaction> >&, boost::intrusive_ptr<TrackedOp>, ThreadPool::TPHandle*)
non-virtual thunk to PrimaryLogPG::queue_transactions(std::vector<ceph::os::Transaction, std::allocator<ceph::os::Transaction> >&, boost::intrusive_ptr<OpRequest>)
ECBackend::handle_sub_write(pg_shard_t, boost::intrusive_ptr<OpRequest>, ECSubWrite&, ZTracer::Trace const&)
ECBackend::_handle_message(boost::intrusive_ptr<OpRequest>)
PGBackend::handle_message(boost::intrusive_ptr<OpRequest>)
PrimaryLogPG::do_request(boost::intrusive_ptr<OpRequest>&, ThreadPool::TPHandle&)
OSD::dequeue_op(boost::intrusive_ptr<PG>, boost::intrusive_ptr<OpRequest>, ThreadPool::TPHandle&)
ceph::osd::scheduler::PGOpItem::run(OSD*, OSDShard*, boost::intrusive_ptr<PG>&, ThreadPool::TPHandle&)
OSD::ShardedOpWQ::_process(unsigned int, ceph::heartbeat_handle_d*)
ShardedThreadPool::shardedthreadpool_worker(unsigned int)
ShardedThreadPool::WorkThreadSharded::entry()
Crash dump sample:
{ "backtrace": [ "/lib64/libpthread.so.0(+0x12cf0) [0x7f6bf7722cf0]", "(BlueStore::_txc_finish_io(BlueStore::TransContext*)+0x4) [0x5602e7349784]", "(BlueStore::queue_transactions(boost::intrusive_ptr<ObjectStore::CollectionImpl>&, std::vector<ceph::os::Transaction, std::allocator<ceph::os::Transaction> >&, boost::intrusive_ptr<TrackedOp>, ThreadPool::TPHandle*)+0x621) [0x5602e736dda1]", "(non-virtual thunk to PrimaryLogPG::queue_transactions(std::vector<ceph::os::Transaction, std::allocator<ceph::os::Transaction> >&, boost::intrusive_ptr<OpRequest>)+0x53) [0x5602e6f32203]", "(ECBackend::handle_sub_write(pg_shard_t, boost::intrusive_ptr<OpRequest>, ECSubWrite&, ZTracer::Trace const&)+0x11c1) [0x5602e71bc9e1]", "(ECBackend::_handle_message(boost::intrusive_ptr<OpRequest>)+0x89b) [0x5602e71d2feb]", "(PGBackend::handle_message(boost::intrusive_ptr<OpRequest>)+0x56) [0x5602e6f852c6]", "(PrimaryLogPG::do_request(boost::intrusive_ptr<OpRequest>&, ThreadPool::TPHandle&)+0x512) [0x5602e6f179f2]", "(OSD::dequeue_op(boost::intrusive_ptr<PG>, boost::intrusive_ptr<OpRequest>, ThreadPool::TPHandle&)+0x1c0) [0x5602e6d6d900]", "(ceph::osd::scheduler::PGOpItem::run(OSD*, OSDShard*, boost::intrusive_ptr<PG>&, ThreadPool::TPHandle&)+0x6d) [0x5602e706c2ad]", "(OSD::ShardedOpWQ::_process(unsigned int, ceph::heartbeat_handle_d*)+0x115f) [0x5602e6d80dbf]", "(ShardedThreadPool::shardedthreadpool_worker(unsigned int)+0x435) [0x5602e74de8c5]", "(ShardedThreadPool::WorkThreadSharded::entry()+0x14) [0x5602e74e0fe4]", "/lib64/libpthread.so.0(+0x81ca) [0x7f6bf77181ca]", "clone()" ], "ceph_version": "17.2.5", "crash_id": "2023-03-09T19:51:52.908767Z_11ea4622-09e8-4230-b7df-c58c4a590900", "entity_name": "osd.63c0397702033cd6b8fcf7110faff918f6da3cf4", "os_id": "centos", "os_name": "CentOS Stream", "os_version": "8", "os_version_id": "8", "process_name": "ceph-osd", "stack_sig": "9393f6f0097cb9743e2efaf137ce7e3068de4aca1bd60d146d6f5ac6450aee38", "timestamp": "2023-03-09T19:51:52.908767Z", 
"utsname_machine": "x86_64", "utsname_release": "5.19.0-35-generic", "utsname_sysname": "Linux", "utsname_version": "#36~22.04.1-Ubuntu SMP PREEMPT_DYNAMIC Fri Feb 17 15:17:25 UTC 2" }
Updated by Telemetry Bot about 1 year ago
Actions