+ PS4='${BASH_SOURCE[0]}:$LINENO: ${FUNCNAME[0]}: ' ../qa/workunits/ceph-helpers.sh:1316: main: export PATH=ceph-disk/virtualenv/bin:ceph-detect-init/virtualenv/bin:.:/sbin:/bin:/usr/sbin:/usr/bin ../qa/workunits/ceph-helpers.sh:1316: main: PATH=ceph-disk/virtualenv/bin:ceph-detect-init/virtualenv/bin:.:/sbin:/bin:/usr/sbin:/usr/bin ../qa/workunits/ceph-helpers.sh:1318: main: export CEPH_CONF=/dev/null ../qa/workunits/ceph-helpers.sh:1318: main: CEPH_CONF=/dev/null ../qa/workunits/ceph-helpers.sh:1319: main: unset CEPH_ARGS ../qa/workunits/ceph-helpers.sh:1321: main: local code ../qa/workunits/ceph-helpers.sh:1322: main: run testdir/osd-crush ./test/mon/osd-crush.sh:21: run: local dir=testdir/osd-crush ./test/mon/osd-crush.sh:22: run: shift ./test/mon/osd-crush.sh:24: run: export CEPH_MON=127.0.0.1:7104 ./test/mon/osd-crush.sh:24: run: CEPH_MON=127.0.0.1:7104 ./test/mon/osd-crush.sh:25: run: export CEPH_ARGS ../test/mon/osd-crush.sh:26: run: uuidgen ./test/mon/osd-crush.sh:26: run: CEPH_ARGS+='--fsid=c4878902-7748-4a77-afab-07655d3b0201 --auth-supported=none ' ./test/mon/osd-crush.sh:27: run: CEPH_ARGS+='--mon-host=127.0.0.1:7104 ' ../test/mon/osd-crush.sh:29: run: set ../test/mon/osd-crush.sh:29: run: sed -n -e 's/^\(TEST_[0-9a-z_]*\) .*/\1/p' ./test/mon/osd-crush.sh:29: run: local 'funcs=TEST_add_ruleset_failed TEST_crush_reject_empty TEST_crush_rename_bucket TEST_crush_repair_faulty_crushmap TEST_crush_rule_create_erasure TEST_crush_rule_create_simple TEST_crush_rule_dump TEST_crush_rule_rm TEST_crush_ruleset_match_rule_when_creating TEST_crush_tree' ./test/mon/osd-crush.sh:30: run: for func in '$funcs' ./test/mon/osd-crush.sh:31: run: setup testdir/osd-crush ../qa/workunits/ceph-helpers.sh:92: setup: local dir=testdir/osd-crush ../qa/workunits/ceph-helpers.sh:93: setup: teardown testdir/osd-crush ../qa/workunits/ceph-helpers.sh:117: teardown: local dir=testdir/osd-crush ../qa/workunits/ceph-helpers.sh:118: teardown: kill_daemons testdir/osd-crush KILL .../qa/workunits/ceph-helpers.sh:195: kill_daemons: shopt -q -o xtrace .../qa/workunits/ceph-helpers.sh:195: kill_daemons: echo true ../qa/workunits/ceph-helpers.sh:195: kill_daemons: local trace=true ../qa/workunits/ceph-helpers.sh:196: kill_daemons: true ../qa/workunits/ceph-helpers.sh:196: kill_daemons: shopt -u -o xtrace ../qa/workunits/ceph-helpers.sh:222: kill_daemons: return 0 .../qa/workunits/ceph-helpers.sh:119: teardown: stat -f -c %T . ../qa/workunits/ceph-helpers.sh:119: teardown: '[' btrfs == btrfs ']' ../qa/workunits/ceph-helpers.sh:120: teardown: __teardown_btrfs testdir/osd-crush ../qa/workunits/ceph-helpers.sh:126: __teardown_btrfs: local btrfs_base_dir=testdir/osd-crush .../qa/workunits/ceph-helpers.sh:128: __teardown_btrfs: ls -l testdir/osd-crush .../qa/workunits/ceph-helpers.sh:128: __teardown_btrfs: egrep '^d' .../qa/workunits/ceph-helpers.sh:128: __teardown_btrfs: awk '{print $9}' ls: cannot access testdir/osd-crush: No such file or directory ../qa/workunits/ceph-helpers.sh:128: __teardown_btrfs: btrfs_dirs= .../qa/workunits/ceph-helpers.sh:129: __teardown_btrfs: pwd ../qa/workunits/ceph-helpers.sh:129: __teardown_btrfs: current_path=/ceph/src .../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: cd testdir/osd-crush ../qa/workunits/ceph-helpers.sh: line 131: cd: testdir/osd-crush: No such file or directory .../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: btrfs subvolume list . 
-t .../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: egrep '^[0-9]' .../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: awk '{print $4}' .../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: grep testdir/osd-crush/ ../qa/workunits/ceph-helpers.sh:122: teardown: rm -fr testdir/osd-crush ../qa/workunits/ceph-helpers.sh:94: setup: mkdir -p testdir/osd-crush ./test/mon/osd-crush.sh:32: run: TEST_add_ruleset_failed testdir/osd-crush ./test/mon/osd-crush.sh:175: TEST_add_ruleset_failed: local dir=testdir/osd-crush ./test/mon/osd-crush.sh:177: TEST_add_ruleset_failed: run_mon testdir/osd-crush a ../qa/workunits/ceph-helpers.sh:293: run_mon: local dir=testdir/osd-crush ../qa/workunits/ceph-helpers.sh:294: run_mon: shift ../qa/workunits/ceph-helpers.sh:295: run_mon: local id=a ../qa/workunits/ceph-helpers.sh:296: run_mon: shift ../qa/workunits/ceph-helpers.sh:297: run_mon: local data=testdir/osd-crush/a ../qa/workunits/ceph-helpers.sh:300: run_mon: ceph-mon --id a --mkfs --mon-data=testdir/osd-crush/a --run-dir=testdir/osd-crush ceph-mon: mon.noname-a 127.0.0.1:7104/0 is local, renaming to mon.a ceph-mon: set fsid to c4878902-7748-4a77-afab-07655d3b0201 ceph-mon: created monfs at testdir/osd-crush/a for mon.a ../qa/workunits/ceph-helpers.sh:307: run_mon: ceph-mon --id a --mon-osd-full-ratio=.99 --mon-data-avail-crit=1 --paxos-propose-interval=0.1 --osd-crush-chooseleaf-type=0 --erasure-code-dir=.libs --plugin-dir=.libs --debug-mon 20 --debug-ms 20 --debug-paxos 20 --chdir= --mon-data=testdir/osd-crush/a '--log-file=testdir/osd-crush/$name.log' '--admin-socket=testdir/osd-crush/$cluster-$name.asok' --mon-cluster-log-file=testdir/osd-crush/log --run-dir=testdir/osd-crush '--pid-file=testdir/osd-crush/$name.pid' ../qa/workunits/ceph-helpers.sh:326: run_mon: cat .../qa/workunits/ceph-helpers.sh:326: run_mon: get_config mon a fsid .../qa/workunits/ceph-helpers.sh:704: get_config: local daemon=mon .../qa/workunits/ceph-helpers.sh:705: get_config: local id=a .../qa/workunits/ceph-helpers.sh:706: get_config: local config=fsid .../qa/workunits/ceph-helpers.sh:708: get_config: CEPH_ARGS= .../qa/workunits/ceph-helpers.sh:708: get_config: ceph --format xml daemon testdir/osd-crush/ceph-mon.a.asok config get fsid .../qa/workunits/ceph-helpers.sh:711: get_config: xmlstarlet sel -t -m //fsid -v . -n .../qa/workunits/ceph-helpers.sh:326: run_mon: get_config mon a mon_host .../qa/workunits/ceph-helpers.sh:704: get_config: local daemon=mon .../qa/workunits/ceph-helpers.sh:705: get_config: local id=a .../qa/workunits/ceph-helpers.sh:706: get_config: local config=mon_host .../qa/workunits/ceph-helpers.sh:708: get_config: CEPH_ARGS= .../qa/workunits/ceph-helpers.sh:708: get_config: ceph --format xml daemon testdir/osd-crush/ceph-mon.a.asok config get mon_host .../qa/workunits/ceph-helpers.sh:711: get_config: xmlstarlet sel -t -m //mon_host -v . -n .../qa/workunits/ceph-helpers.sh:331: run_mon: get_config mon a mon_initial_members .../qa/workunits/ceph-helpers.sh:704: get_config: local daemon=mon .../qa/workunits/ceph-helpers.sh:705: get_config: local id=a .../qa/workunits/ceph-helpers.sh:706: get_config: local config=mon_initial_members .../qa/workunits/ceph-helpers.sh:708: get_config: CEPH_ARGS= .../qa/workunits/ceph-helpers.sh:708: get_config: ceph --format xml daemon testdir/osd-crush/ceph-mon.a.asok config get mon_initial_members .../qa/workunits/ceph-helpers.sh:711: get_config: xmlstarlet sel -t -m //mon_initial_members -v . 
-n ../qa/workunits/ceph-helpers.sh:331: run_mon: test -z '' ../qa/workunits/ceph-helpers.sh:332: run_mon: ceph osd pool delete rbd rbd --yes-i-really-really-mean-it *** DEVELOPER MODE: setting PATH, PYTHONPATH and LD_LIBRARY_PATH *** pool 'rbd' removed ../qa/workunits/ceph-helpers.sh:333: run_mon: ceph osd pool create rbd 4 *** DEVELOPER MODE: setting PATH, PYTHONPATH and LD_LIBRARY_PATH *** pool 'rbd' created ./test/mon/osd-crush.sh:179: TEST_add_ruleset_failed: local root=host1 ./test/mon/osd-crush.sh:181: TEST_add_ruleset_failed: ./ceph osd crush add-bucket host1 host *** DEVELOPER MODE: setting PATH, PYTHONPATH and LD_LIBRARY_PATH *** added bucket host1 type host to crush map ./test/mon/osd-crush.sh:182: TEST_add_ruleset_failed: ./ceph osd crush rule create-simple test_rule1 host1 osd firstn *** DEVELOPER MODE: setting PATH, PYTHONPATH and LD_LIBRARY_PATH *** ./test/mon/osd-crush.sh:183: TEST_add_ruleset_failed: ./ceph osd crush rule create-simple test_rule2 host1 osd firstn *** DEVELOPER MODE: setting PATH, PYTHONPATH and LD_LIBRARY_PATH *** ./test/mon/osd-crush.sh:184: TEST_add_ruleset_failed: ./ceph osd getcrushmap *** DEVELOPER MODE: setting PATH, PYTHONPATH and LD_LIBRARY_PATH *** got crush map from osdmap epoch 6 ./test/mon/osd-crush.sh:185: TEST_add_ruleset_failed: ./crushtool --decompile testdir/osd-crush/crushmap ../test/mon/osd-crush.sh:186: TEST_add_ruleset_failed: seq 3 255 ./test/mon/osd-crush.sh:186: TEST_add_ruleset_failed: for i in '$(seq 3 255)' ./test/mon/osd-crush.sh:188: TEST_add_ruleset_failed: cat ./test/mon/osd-crush.sh:186: TEST_add_ruleset_failed: for i in '$(seq 3 255)' ./test/mon/osd-crush.sh:188: TEST_add_ruleset_failed: cat ./test/mon/osd-crush.sh:186: TEST_add_ruleset_failed: for i in '$(seq 3 255)' ./test/mon/osd-crush.sh:188: TEST_add_ruleset_failed: cat ./test/mon/osd-crush.sh:186: TEST_add_ruleset_failed: for i in '$(seq 3 255)' ./test/mon/osd-crush.sh:188: TEST_add_ruleset_failed: cat ./test/mon/osd-crush.sh:186: TEST_add_ruleset_failed: for i in '$(seq 3 255)' ./test/mon/osd-crush.sh:188: TEST_add_ruleset_failed: cat ./test/mon/osd-crush.sh:186: TEST_add_ruleset_failed: for i in '$(seq 3 255)' ./test/mon/osd-crush.sh:188: TEST_add_ruleset_failed: cat ./test/mon/osd-crush.sh:186: TEST_add_ruleset_failed: for i in '$(seq 3 255)' ./test/mon/osd-crush.sh:188: TEST_add_ruleset_failed: cat ./test/mon/osd-crush.sh:186: TEST_add_ruleset_failed: for i in '$(seq 3 255)' ./test/mon/osd-crush.sh:188: TEST_add_ruleset_failed: cat ./test/mon/osd-crush.sh:186: TEST_add_ruleset_failed: for i in '$(seq 3 255)' ./test/mon/osd-crush.sh:188: TEST_add_ruleset_failed: cat ./test/mon/osd-crush.sh:186: TEST_add_ruleset_failed: for i in '$(seq 3 255)' ./test/mon/osd-crush.sh:188: TEST_add_ruleset_failed: cat ./test/mon/osd-crush.sh:186: TEST_add_ruleset_failed: for i in '$(seq 3 255)' ./test/mon/osd-crush.sh:188: TEST_add_ruleset_failed: cat ./test/mon/osd-crush.sh:186: TEST_add_ruleset_failed: for i in '$(seq 3 255)' ./test/mon/osd-crush.sh:188: TEST_add_ruleset_failed: cat ./test/mon/osd-crush.sh:186: TEST_add_ruleset_failed: for i in '$(seq 3 255)' ./test/mon/osd-crush.sh:188: TEST_add_ruleset_failed: cat ./test/mon/osd-crush.sh:186: TEST_add_ruleset_failed: for i in '$(seq 3 255)' ./test/mon/osd-crush.sh:188: TEST_add_ruleset_failed: cat ./test/mon/osd-crush.sh:186: TEST_add_ruleset_failed: for i in '$(seq 3 255)' ./test/mon/osd-crush.sh:188: TEST_add_ruleset_failed: cat ./test/mon/osd-crush.sh:186: TEST_add_ruleset_failed: for i in '$(seq 3 255)' 
[... xtrace lines ./test/mon/osd-crush.sh:186: TEST_add_ruleset_failed: for i in '$(seq 3 255)' and ./test/mon/osd-crush.sh:188: TEST_add_ruleset_failed: cat repeat identically for the remaining loop iterations ...]
./test/mon/osd-crush.sh:188: TEST_add_ruleset_failed: cat ./test/mon/osd-crush.sh:186: TEST_add_ruleset_failed: for i in '$(seq 3 255)' ./test/mon/osd-crush.sh:188: TEST_add_ruleset_failed: cat ./test/mon/osd-crush.sh:186: TEST_add_ruleset_failed: for i in '$(seq 3 255)' ./test/mon/osd-crush.sh:188: TEST_add_ruleset_failed: cat ./test/mon/osd-crush.sh:186: TEST_add_ruleset_failed: for i in '$(seq 3 255)' ./test/mon/osd-crush.sh:188: TEST_add_ruleset_failed: cat ./test/mon/osd-crush.sh:200: TEST_add_ruleset_failed: ./crushtool --compile testdir/osd-crush/crushmap.txt -o testdir/osd-crush/crushmap ./test/mon/osd-crush.sh:201: TEST_add_ruleset_failed: ./ceph osd setcrushmap -i testdir/osd-crush/crushmap *** DEVELOPER MODE: setting PATH, PYTHONPATH and LD_LIBRARY_PATH *** set crush map ./test/mon/osd-crush.sh:202: TEST_add_ruleset_failed: grep 'Error ENOSPC' ./test/mon/osd-crush.sh:202: TEST_add_ruleset_failed: ./ceph osd crush rule create-simple test_rule_nospace host1 osd firstn Error ENOSPC: failed to add rule 256 because (28) No space left on device ./test/mon/osd-crush.sh:33: run: teardown testdir/osd-crush ../qa/workunits/ceph-helpers.sh:117: teardown: local dir=testdir/osd-crush ../qa/workunits/ceph-helpers.sh:118: teardown: kill_daemons testdir/osd-crush KILL .../qa/workunits/ceph-helpers.sh:195: kill_daemons: shopt -q -o xtrace .../qa/workunits/ceph-helpers.sh:195: kill_daemons: echo true ../qa/workunits/ceph-helpers.sh:195: kill_daemons: local trace=true ../qa/workunits/ceph-helpers.sh:196: kill_daemons: true ../qa/workunits/ceph-helpers.sh:196: kill_daemons: shopt -u -o xtrace ../qa/workunits/ceph-helpers.sh:222: kill_daemons: return 0 .../qa/workunits/ceph-helpers.sh:119: teardown: stat -f -c %T . ../qa/workunits/ceph-helpers.sh:119: teardown: '[' btrfs == btrfs ']' ../qa/workunits/ceph-helpers.sh:120: teardown: __teardown_btrfs testdir/osd-crush ../qa/workunits/ceph-helpers.sh:126: __teardown_btrfs: local btrfs_base_dir=testdir/osd-crush .../qa/workunits/ceph-helpers.sh:128: __teardown_btrfs: ls -l testdir/osd-crush .../qa/workunits/ceph-helpers.sh:128: __teardown_btrfs: awk '{print $9}' .../qa/workunits/ceph-helpers.sh:128: __teardown_btrfs: egrep '^d' ../qa/workunits/ceph-helpers.sh:128: __teardown_btrfs: btrfs_dirs=a .../qa/workunits/ceph-helpers.sh:129: __teardown_btrfs: pwd ../qa/workunits/ceph-helpers.sh:129: __teardown_btrfs: current_path=/ceph/src .../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: cd testdir/osd-crush .../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: btrfs subvolume list . 
-t .../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: egrep '^[0-9]' .../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: awk '{print $4}' .../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: grep testdir/osd-crush/ ../qa/workunits/ceph-helpers.sh:122: teardown: rm -fr testdir/osd-crush ./test/mon/osd-crush.sh:30: run: for func in '$funcs' ./test/mon/osd-crush.sh:31: run: setup testdir/osd-crush ../qa/workunits/ceph-helpers.sh:92: setup: local dir=testdir/osd-crush ../qa/workunits/ceph-helpers.sh:93: setup: teardown testdir/osd-crush ../qa/workunits/ceph-helpers.sh:117: teardown: local dir=testdir/osd-crush ../qa/workunits/ceph-helpers.sh:118: teardown: kill_daemons testdir/osd-crush KILL .../qa/workunits/ceph-helpers.sh:195: kill_daemons: shopt -q -o xtrace .../qa/workunits/ceph-helpers.sh:195: kill_daemons: echo true ../qa/workunits/ceph-helpers.sh:195: kill_daemons: local trace=true ../qa/workunits/ceph-helpers.sh:196: kill_daemons: true ../qa/workunits/ceph-helpers.sh:196: kill_daemons: shopt -u -o xtrace ../qa/workunits/ceph-helpers.sh:222: kill_daemons: return 0 .../qa/workunits/ceph-helpers.sh:119: teardown: stat -f -c %T . ../qa/workunits/ceph-helpers.sh:119: teardown: '[' btrfs == btrfs ']' ../qa/workunits/ceph-helpers.sh:120: teardown: __teardown_btrfs testdir/osd-crush ../qa/workunits/ceph-helpers.sh:126: __teardown_btrfs: local btrfs_base_dir=testdir/osd-crush .../qa/workunits/ceph-helpers.sh:128: __teardown_btrfs: ls -l testdir/osd-crush .../qa/workunits/ceph-helpers.sh:128: __teardown_btrfs: egrep '^d' .../qa/workunits/ceph-helpers.sh:128: __teardown_btrfs: awk '{print $9}' ls: cannot access testdir/osd-crush: No such file or directory ../qa/workunits/ceph-helpers.sh:128: __teardown_btrfs: btrfs_dirs= .../qa/workunits/ceph-helpers.sh:129: __teardown_btrfs: pwd ../qa/workunits/ceph-helpers.sh:129: __teardown_btrfs: current_path=/ceph/src .../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: cd testdir/osd-crush ../qa/workunits/ceph-helpers.sh: line 131: cd: testdir/osd-crush: No such file or directory .../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: btrfs subvolume list . 
-t .../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: egrep '^[0-9]' .../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: awk '{print $4}' .../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: grep testdir/osd-crush/ ../qa/workunits/ceph-helpers.sh:122: teardown: rm -fr testdir/osd-crush ../qa/workunits/ceph-helpers.sh:94: setup: mkdir -p testdir/osd-crush ./test/mon/osd-crush.sh:32: run: TEST_crush_reject_empty testdir/osd-crush ./test/mon/osd-crush.sh:220: TEST_crush_reject_empty: local dir=testdir/osd-crush ./test/mon/osd-crush.sh:221: TEST_crush_reject_empty: run_mon testdir/osd-crush a ../qa/workunits/ceph-helpers.sh:293: run_mon: local dir=testdir/osd-crush ../qa/workunits/ceph-helpers.sh:294: run_mon: shift ../qa/workunits/ceph-helpers.sh:295: run_mon: local id=a ../qa/workunits/ceph-helpers.sh:296: run_mon: shift ../qa/workunits/ceph-helpers.sh:297: run_mon: local data=testdir/osd-crush/a ../qa/workunits/ceph-helpers.sh:300: run_mon: ceph-mon --id a --mkfs --mon-data=testdir/osd-crush/a --run-dir=testdir/osd-crush ceph-mon: mon.noname-a 127.0.0.1:7104/0 is local, renaming to mon.a ceph-mon: set fsid to c4878902-7748-4a77-afab-07655d3b0201 ceph-mon: created monfs at testdir/osd-crush/a for mon.a ../qa/workunits/ceph-helpers.sh:307: run_mon: ceph-mon --id a --mon-osd-full-ratio=.99 --mon-data-avail-crit=1 --paxos-propose-interval=0.1 --osd-crush-chooseleaf-type=0 --erasure-code-dir=.libs --plugin-dir=.libs --debug-mon 20 --debug-ms 20 --debug-paxos 20 --chdir= --mon-data=testdir/osd-crush/a '--log-file=testdir/osd-crush/$name.log' '--admin-socket=testdir/osd-crush/$cluster-$name.asok' --mon-cluster-log-file=testdir/osd-crush/log --run-dir=testdir/osd-crush '--pid-file=testdir/osd-crush/$name.pid' ../qa/workunits/ceph-helpers.sh:326: run_mon: cat .../qa/workunits/ceph-helpers.sh:326: run_mon: get_config mon a fsid .../qa/workunits/ceph-helpers.sh:704: get_config: local daemon=mon .../qa/workunits/ceph-helpers.sh:705: get_config: local id=a .../qa/workunits/ceph-helpers.sh:706: get_config: local config=fsid .../qa/workunits/ceph-helpers.sh:708: get_config: CEPH_ARGS= .../qa/workunits/ceph-helpers.sh:708: get_config: ceph --format xml daemon testdir/osd-crush/ceph-mon.a.asok config get fsid .../qa/workunits/ceph-helpers.sh:711: get_config: xmlstarlet sel -t -m //fsid -v . -n .../qa/workunits/ceph-helpers.sh:326: run_mon: get_config mon a mon_host .../qa/workunits/ceph-helpers.sh:704: get_config: local daemon=mon .../qa/workunits/ceph-helpers.sh:705: get_config: local id=a .../qa/workunits/ceph-helpers.sh:706: get_config: local config=mon_host .../qa/workunits/ceph-helpers.sh:708: get_config: CEPH_ARGS= .../qa/workunits/ceph-helpers.sh:708: get_config: ceph --format xml daemon testdir/osd-crush/ceph-mon.a.asok config get mon_host .../qa/workunits/ceph-helpers.sh:711: get_config: xmlstarlet sel -t -m //mon_host -v . -n .../qa/workunits/ceph-helpers.sh:331: run_mon: get_config mon a mon_initial_members .../qa/workunits/ceph-helpers.sh:704: get_config: local daemon=mon .../qa/workunits/ceph-helpers.sh:705: get_config: local id=a .../qa/workunits/ceph-helpers.sh:706: get_config: local config=mon_initial_members .../qa/workunits/ceph-helpers.sh:708: get_config: CEPH_ARGS= .../qa/workunits/ceph-helpers.sh:708: get_config: ceph --format xml daemon testdir/osd-crush/ceph-mon.a.asok config get mon_initial_members .../qa/workunits/ceph-helpers.sh:711: get_config: xmlstarlet sel -t -m //mon_initial_members -v . 
-n ../qa/workunits/ceph-helpers.sh:331: run_mon: test -z '' ../qa/workunits/ceph-helpers.sh:332: run_mon: ceph osd pool delete rbd rbd --yes-i-really-really-mean-it *** DEVELOPER MODE: setting PATH, PYTHONPATH and LD_LIBRARY_PATH *** pool 'rbd' removed ../qa/workunits/ceph-helpers.sh:333: run_mon: ceph osd pool create rbd 4 *** DEVELOPER MODE: setting PATH, PYTHONPATH and LD_LIBRARY_PATH *** pool 'rbd' created ./test/mon/osd-crush.sh:223: TEST_crush_reject_empty: run_osd testdir/osd-crush 0 ../qa/workunits/ceph-helpers.sh:409: run_osd: local dir=testdir/osd-crush ../qa/workunits/ceph-helpers.sh:410: run_osd: shift ../qa/workunits/ceph-helpers.sh:411: run_osd: local id=0 ../qa/workunits/ceph-helpers.sh:412: run_osd: shift ../qa/workunits/ceph-helpers.sh:413: run_osd: local osd_data=testdir/osd-crush/0 ../qa/workunits/ceph-helpers.sh:415: run_osd: local ceph_disk_args ../qa/workunits/ceph-helpers.sh:416: run_osd: ceph_disk_args+=' --statedir=testdir/osd-crush' ../qa/workunits/ceph-helpers.sh:417: run_osd: ceph_disk_args+=' --sysconfdir=testdir/osd-crush' ../qa/workunits/ceph-helpers.sh:418: run_osd: ceph_disk_args+=' --prepend-to-path=' ../qa/workunits/ceph-helpers.sh:420: run_osd: mkdir -p testdir/osd-crush/0 ../qa/workunits/ceph-helpers.sh:421: run_osd: ceph-disk --statedir=testdir/osd-crush --sysconfdir=testdir/osd-crush --prepend-to-path= prepare testdir/osd-crush/0 ../qa/workunits/ceph-helpers.sh:424: run_osd: activate_osd testdir/osd-crush 0 ../qa/workunits/ceph-helpers.sh:528: activate_osd: local dir=testdir/osd-crush ../qa/workunits/ceph-helpers.sh:529: activate_osd: shift ../qa/workunits/ceph-helpers.sh:530: activate_osd: local id=0 ../qa/workunits/ceph-helpers.sh:531: activate_osd: shift ../qa/workunits/ceph-helpers.sh:532: activate_osd: local osd_data=testdir/osd-crush/0 ../qa/workunits/ceph-helpers.sh:534: activate_osd: local ceph_disk_args ../qa/workunits/ceph-helpers.sh:535: activate_osd: ceph_disk_args+=' --statedir=testdir/osd-crush' ../qa/workunits/ceph-helpers.sh:536: activate_osd: ceph_disk_args+=' --sysconfdir=testdir/osd-crush' ../qa/workunits/ceph-helpers.sh:537: activate_osd: ceph_disk_args+=' --prepend-to-path=' ../qa/workunits/ceph-helpers.sh:539: activate_osd: local 'ceph_args=--fsid=c4878902-7748-4a77-afab-07655d3b0201 --auth-supported=none --mon-host=127.0.0.1:7104 ' ../qa/workunits/ceph-helpers.sh:540: activate_osd: ceph_args+=' --osd-backfill-full-ratio=.99' ../qa/workunits/ceph-helpers.sh:541: activate_osd: ceph_args+=' --osd-failsafe-full-ratio=.99' ../qa/workunits/ceph-helpers.sh:542: activate_osd: ceph_args+=' --osd-journal-size=100' ../qa/workunits/ceph-helpers.sh:543: activate_osd: ceph_args+=' --osd-scrub-load-threshold=2000' ../qa/workunits/ceph-helpers.sh:544: activate_osd: ceph_args+=' --osd-data=testdir/osd-crush/0' ../qa/workunits/ceph-helpers.sh:545: activate_osd: ceph_args+=' --chdir=' ../qa/workunits/ceph-helpers.sh:546: activate_osd: ceph_args+=' --erasure-code-dir=.libs' ../qa/workunits/ceph-helpers.sh:547: activate_osd: ceph_args+=' --plugin-dir=.libs' ../qa/workunits/ceph-helpers.sh:548: activate_osd: ceph_args+=' --osd-class-dir=.libs' ../qa/workunits/ceph-helpers.sh:549: activate_osd: ceph_args+=' --run-dir=testdir/osd-crush' ../qa/workunits/ceph-helpers.sh:550: activate_osd: ceph_args+=' --debug-osd=20' ../qa/workunits/ceph-helpers.sh:551: activate_osd: ceph_args+=' --log-file=testdir/osd-crush/$name.log' ../qa/workunits/ceph-helpers.sh:552: activate_osd: ceph_args+=' --pid-file=testdir/osd-crush/$name.pid' 
../qa/workunits/ceph-helpers.sh:553: activate_osd: ceph_args+=' ' ../qa/workunits/ceph-helpers.sh:554: activate_osd: ceph_args+= ../qa/workunits/ceph-helpers.sh:555: activate_osd: mkdir -p testdir/osd-crush/0 ../qa/workunits/ceph-helpers.sh:556: activate_osd: CEPH_ARGS='--fsid=c4878902-7748-4a77-afab-07655d3b0201 --auth-supported=none --mon-host=127.0.0.1:7104 --osd-backfill-full-ratio=.99 --osd-failsafe-full-ratio=.99 --osd-journal-size=100 --osd-scrub-load-threshold=2000 --osd-data=testdir/osd-crush/0 --chdir= --erasure-code-dir=.libs --plugin-dir=.libs --osd-class-dir=.libs --run-dir=testdir/osd-crush --debug-osd=20 --log-file=testdir/osd-crush/$name.log --pid-file=testdir/osd-crush/$name.pid ' ../qa/workunits/ceph-helpers.sh:556: activate_osd: ceph-disk --statedir=testdir/osd-crush --sysconfdir=testdir/osd-crush --prepend-to-path= activate --mark-init=none testdir/osd-crush/0 *** DEVELOPER MODE: setting PATH, PYTHONPATH and LD_LIBRARY_PATH *** got monmap epoch 1 2016-03-08 15:19:03.773892 7f8b3b3c0940 -1 journal FileJournal::_open: disabling aio for non-block journal. Use journal_force_aio to force use of aio anyway 2016-03-08 15:19:03.918681 7f8b3b3c0940 -1 journal FileJournal::_open: disabling aio for non-block journal. Use journal_force_aio to force use of aio anyway 2016-03-08 15:19:03.919062 7f8b3b3c0940 -1 filestore(testdir/osd-crush/0) could not find !-1:7b3f43c4:::osd_superblock:0! in index: (2) No such file or directory 2016-03-08 15:19:03.978574 7f8b3b3c0940 -1 created object store testdir/osd-crush/0 for osd.0 fsid c4878902-7748-4a77-afab-07655d3b0201 2016-03-08 15:19:03.978601 7f8b3b3c0940 -1 auth: error reading file: testdir/osd-crush/0/keyring: can't open testdir/osd-crush/0/keyring: (2) No such file or directory 2016-03-08 15:19:03.978733 7f8b3b3c0940 -1 created new key in keyring testdir/osd-crush/0/keyring *** DEVELOPER MODE: setting PATH, PYTHONPATH and LD_LIBRARY_PATH *** added key for osd.0 starting osd.0 at :/0 osd_data testdir/osd-crush/0 testdir/osd-crush/0/journal .../qa/workunits/ceph-helpers.sh:561: activate_osd: cat testdir/osd-crush/0/whoami ../qa/workunits/ceph-helpers.sh:561: activate_osd: '[' 0 = 0 ']' ../qa/workunits/ceph-helpers.sh:563: activate_osd: ceph osd crush create-or-move 0 1 root=default host=localhost *** DEVELOPER MODE: setting PATH, PYTHONPATH and LD_LIBRARY_PATH *** create-or-move updating item name 'osd.0' weight 1 at location {host=localhost,root=default} to crush map ../qa/workunits/ceph-helpers.sh:565: activate_osd: wait_for_osd up 0 ../qa/workunits/ceph-helpers.sh:601: wait_for_osd: local state=up ../qa/workunits/ceph-helpers.sh:602: wait_for_osd: local id=0 ../qa/workunits/ceph-helpers.sh:604: wait_for_osd: status=1 ../qa/workunits/ceph-helpers.sh:605: wait_for_osd: (( i=0 )) ../qa/workunits/ceph-helpers.sh:605: wait_for_osd: (( i < 300 )) ../qa/workunits/ceph-helpers.sh:606: wait_for_osd: ceph osd dump ../qa/workunits/ceph-helpers.sh:606: wait_for_osd: grep 'osd.0 up' *** DEVELOPER MODE: setting PATH, PYTHONPATH and LD_LIBRARY_PATH *** osd.0 up in weight 1 up_from 6 up_thru 0 down_at 0 last_clean_interval [0,0) 127.0.0.1:6800/4256 127.0.0.1:6801/4256 127.0.0.1:6802/4256 127.0.0.1:6803/4256 exists,up ad2b7a59-c78c-449e-86c6-e537c3b12550 ../qa/workunits/ceph-helpers.sh:609: wait_for_osd: status=0 ../qa/workunits/ceph-helpers.sh:610: wait_for_osd: break ../qa/workunits/ceph-helpers.sh:613: wait_for_osd: return 0 ./test/mon/osd-crush.sh:225: TEST_crush_reject_empty: local empty_map=testdir/osd-crush/empty_map ./test/mon/osd-crush.sh:226: 
./test/mon/osd-crush.sh:227: TEST_crush_reject_empty: ./crushtool -c testdir/osd-crush/empty_map.txt -o testdir/osd-crush/empty_map.map
./test/mon/osd-crush.sh:228: TEST_crush_reject_empty: cp testdir/osd-crush/empty_map.txt /tmp
./test/mon/osd-crush.sh:229: TEST_crush_reject_empty: cp testdir/osd-crush/empty_map.map /tmp
./test/mon/osd-crush.sh:230: TEST_crush_reject_empty: expect_failure testdir/osd-crush 'Error EINVAL' ./ceph osd setcrushmap -i testdir/osd-crush/empty_map.map
../qa/workunits/ceph-helpers.sh:1146: expect_failure: local dir=testdir/osd-crush
../qa/workunits/ceph-helpers.sh:1147: expect_failure: shift
../qa/workunits/ceph-helpers.sh:1148: expect_failure: local 'expected=Error EINVAL'
../qa/workunits/ceph-helpers.sh:1149: expect_failure: shift
../qa/workunits/ceph-helpers.sh:1150: expect_failure: local success
../qa/workunits/ceph-helpers.sh:1152: expect_failure: ./ceph osd setcrushmap -i testdir/osd-crush/empty_map.map
../qa/workunits/ceph-helpers.sh:1155: expect_failure: success=false
../qa/workunits/ceph-helpers.sh:1158: expect_failure: false
../qa/workunits/ceph-helpers.sh:1158: expect_failure: grep --quiet 'Error EINVAL' testdir/osd-crush/out
../qa/workunits/ceph-helpers.sh:1159: expect_failure: cat testdir/osd-crush/out
*** DEVELOPER MODE: setting PATH, PYTHONPATH and LD_LIBRARY_PATH ***
Traceback (most recent call last):
  File "./ceph", line 953, in <module>
    retval = main()
  File "./ceph", line 887, in main
    sigdict, inbuf, verbose)
  File "./ceph", line 470, in new_style_command
    inbuf=inbuf)
  File "/ceph/src/pybind/ceph_argparse.py", line 1270, in json_command
    raise RuntimeError('"{0}": exception {1}'.format(argdict, e))
RuntimeError: "{'prefix': u'osd setcrushmap'}": exception "['{"prefix": "osd setcrushmap"}']": exception 'utf8' codec can't decode byte 0xef in position 23: invalid continuation byte
../qa/workunits/ceph-helpers.sh:1160: expect_failure: return 1
./test/mon/osd-crush.sh:231: TEST_crush_reject_empty: return 1
./test/mon/osd-crush.sh:32: run: return 1
../qa/workunits/ceph-helpers.sh:1325: main: display_logs testdir/osd-crush
../qa/workunits/ceph-helpers.sh:1264: display_logs: local dir=testdir/osd-crush
../qa/workunits/ceph-helpers.sh:1266: display_logs: find testdir/osd-crush -maxdepth 1 -name '*.log'
../qa/workunits/ceph-helpers.sh:1267: display_logs: read file
../qa/workunits/ceph-helpers.sh:1268: display_logs: echo '======================= testdir/osd-crush/mon.a.log'
======================= testdir/osd-crush/mon.a.log
../qa/workunits/ceph-helpers.sh:1269: display_logs: cat testdir/osd-crush/mon.a.log
2016-03-08 15:19:02.066567 7f82aea5c5c0 0 ceph version 10.0.4-1828-g754d210 (754d2103e1c504377bebc72430c00f931fa397eb), process ceph-mon, pid 3519 2016-03-08 15:19:02.074639 7f82aea5c5c0 10 load: jerasure load: lrc load: isa 2016-03-08 15:19:02.074785 7f82aea5c5c0 1 leveldb: Recovering log #3 2016-03-08 15:19:02.074810 7f82aea5c5c0 1 leveldb: Level-0 table #5: started 2016-03-08 15:19:02.081603 7f82aea5c5c0 1 leveldb: Level-0 table #5: 559 bytes OK 2016-03-08 15:19:02.095508 7f82aea5c5c0 1 leveldb: Delete type=0 #3 2016-03-08 15:19:02.095563 7f82aea5c5c0 1 leveldb: Delete type=3 #2 2016-03-08 15:19:02.095677 7f82aea5c5c0 10 obtain_monmap 2016-03-08 15:19:02.095690 7f82aea5c5c0 10 obtain_monmap found mkfs monmap 2016-03-08 15:19:02.095742 7f82aea5c5c0 0 starting mon.a rank 0 at 127.0.0.1:7104/0 mon_data testdir/osd-crush/a fsid c4878902-7748-4a77-afab-07655d3b0201 2016-03-08 15:19:02.095767 7f82aea5c5c0
10 -- :/0 rank.bind 127.0.0.1:7104/0 2016-03-08 15:19:02.095769 7f82aea5c5c0 10 accepter.accepter.bind 2016-03-08 15:19:02.095809 7f82aea5c5c0 10 accepter.accepter.bind bound to 127.0.0.1:7104/0 2016-03-08 15:19:02.095815 7f82aea5c5c0 1 -- 127.0.0.1:7104/0 learned my addr 127.0.0.1:7104/0 2016-03-08 15:19:02.095817 7f82aea5c5c0 1 accepter.accepter.bind my_inst.addr is 127.0.0.1:7104/0 need_addr=0 2016-03-08 15:19:02.095934 7f82aea5c5c0 1 mon.a@-1(probing) e0 preinit fsid c4878902-7748-4a77-afab-07655d3b0201 2016-03-08 15:19:02.095977 7f82aea5c5c0 10 mon.a@-1(probing) e0 check_fsid cluster_uuid contains 'c4878902-7748-4a77-afab-07655d3b0201' 2016-03-08 15:19:02.095985 7f82aea5c5c0 10 mon.a@-1(probing) e0 features compat={},rocompat={},incompat={1=initial feature set (~v.18),3=single paxos with k/v store (v0.?)} 2016-03-08 15:19:02.095988 7f82aea5c5c0 10 mon.a@-1(probing) e0 apply_compatset_features_to_quorum_requirements required_features 0 2016-03-08 15:19:02.095989 7f82aea5c5c0 10 mon.a@-1(probing) e0 required_features 0 2016-03-08 15:19:02.095992 7f82aea5c5c0 10 mon.a@-1(probing) e0 has_ever_joined = 0 2016-03-08 15:19:02.095999 7f82aea5c5c0 10 mon.a@-1(probing) e0 sync_last_committed_floor 0 2016-03-08 15:19:02.096000 7f82aea5c5c0 10 mon.a@-1(probing) e0 init_paxos 2016-03-08 15:19:02.096005 7f82aea5c5c0 10 mon.a@-1(probing).paxos(paxos recovering c 0..0) init last_pn: 0 accepted_pn: 0 last_committed: 0 first_committed: 0 2016-03-08 15:19:02.096008 7f82aea5c5c0 10 mon.a@-1(probing).paxos(paxos recovering c 0..0) init 2016-03-08 15:19:02.096040 7f82aea5c5c0 10 mon.a@-1(probing) e0 refresh_from_paxos 2016-03-08 15:19:02.096044 7f82aea5c5c0 10 mon.a@-1(probing) e0 refresh_from_paxos no cluster_fingerprint 2016-03-08 15:19:02.096049 7f82aea5c5c0 10 mon.a@-1(probing).paxosservice(pgmap 0..0) refresh 2016-03-08 15:19:02.096054 7f82aea5c5c0 10 mon.a@-1(probing).paxosservice(mdsmap 0..0) refresh 2016-03-08 15:19:02.096067 7f82aea5c5c0 10 mon.a@-1(probing).paxosservice(osdmap 0..0) refresh 2016-03-08 15:19:02.096076 7f82aea5c5c0 10 mon.a@-1(probing).paxosservice(logm 0..0) refresh 2016-03-08 15:19:02.096077 7f82aea5c5c0 10 mon.a@-1(probing).log v0 update_from_paxos 2016-03-08 15:19:02.096078 7f82aea5c5c0 10 mon.a@-1(probing).log v0 update_from_paxos version 0 summary v 0 2016-03-08 15:19:02.096083 7f82aea5c5c0 10 mon.a@-1(probing).paxosservice(monmap 0..0) refresh 2016-03-08 15:19:02.096091 7f82aea5c5c0 10 mon.a@-1(probing).paxosservice(auth 0..0) refresh 2016-03-08 15:19:02.096093 7f82aea5c5c0 10 mon.a@-1(probing).auth v0 update_from_paxos 2016-03-08 15:19:02.096094 7f82aea5c5c0 10 mon.a@-1(probing).paxosservice(pgmap 0..0) post_refresh 2016-03-08 15:19:02.096099 7f82aea5c5c0 10 mon.a@-1(probing).pg v0 post_paxos_update 2016-03-08 15:19:02.096100 7f82aea5c5c0 10 mon.a@-1(probing).paxosservice(mdsmap 0..0) post_refresh 2016-03-08 15:19:02.096101 7f82aea5c5c0 10 mon.a@-1(probing).paxosservice(osdmap 0..0) post_refresh 2016-03-08 15:19:02.096101 7f82aea5c5c0 10 mon.a@-1(probing).paxosservice(logm 0..0) post_refresh 2016-03-08 15:19:02.096102 7f82aea5c5c0 10 mon.a@-1(probing).paxosservice(monmap 0..0) post_refresh 2016-03-08 15:19:02.096103 7f82aea5c5c0 10 mon.a@-1(probing).paxosservice(auth 0..0) post_refresh 2016-03-08 15:19:02.096108 7f82aea5c5c0 10 mon.a@-1(probing).health(0) init 2016-03-08 15:19:02.096174 7f82aea5c5c0 1 -- 127.0.0.1:7104/0 messenger.start 2016-03-08 15:19:02.096196 7f82aea5c5c0 2 mon.a@-1(probing) e0 init 2016-03-08 15:19:02.096217 7f82aea5c5c0 10 -- 127.0.0.1:7104/0 ready 
127.0.0.1:7104/0 2016-03-08 15:19:02.096217 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper_entry start 2016-03-08 15:19:02.096247 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper 2016-03-08 15:19:02.096250 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper done 2016-03-08 15:19:02.096260 7f82aea5c5c0 1 accepter.accepter.start 2016-03-08 15:19:02.096277 7f82aea5c5c0 10 mon.a@-1(probing) e0 bootstrap 2016-03-08 15:19:02.096280 7f82aea5c5c0 10 mon.a@-1(probing) e0 sync_reset_requester 2016-03-08 15:19:02.096281 7f82aea5c5c0 10 mon.a@-1(probing) e0 unregister_cluster_logger - not registered 2016-03-08 15:19:02.096282 7f82aea5c5c0 10 mon.a@-1(probing) e0 cancel_probe_timeout (none scheduled) 2016-03-08 15:19:02.096283 7f82aea5c5c0 0 mon.a@-1(probing) e0 my rank is now 0 (was -1) 2016-03-08 15:19:02.096284 7f82aea5c5c0 1 -- 127.0.0.1:7104/0 mark_down_all 2016-03-08 15:19:02.096285 7f82a4350700 10 accepter.accepter starting 2016-03-08 15:19:02.096287 7f82a4350700 20 accepter.accepter calling poll 2016-03-08 15:19:02.096286 7f82aea5c5c0 10 mon.a@0(probing) e0 _reset 2016-03-08 15:19:02.096287 7f82aea5c5c0 10 mon.a@0(probing) e0 cancel_probe_timeout (none scheduled) 2016-03-08 15:19:02.096288 7f82aea5c5c0 10 mon.a@0(probing) e0 timecheck_finish 2016-03-08 15:19:02.096289 7f82aea5c5c0 15 mon.a@0(probing) e0 health_tick_stop 2016-03-08 15:19:02.096290 7f82aea5c5c0 15 mon.a@0(probing) e0 health_interval_stop 2016-03-08 15:19:02.096291 7f82aea5c5c0 10 mon.a@0(probing) e0 scrub_event_cancel 2016-03-08 15:19:02.096292 7f82aea5c5c0 10 mon.a@0(probing) e0 scrub_reset 2016-03-08 15:19:02.096293 7f82aea5c5c0 10 mon.a@0(probing).paxos(paxos recovering c 0..0) restart -- canceling timeouts 2016-03-08 15:19:02.096295 7f82aea5c5c0 10 mon.a@0(probing).paxosservice(pgmap 0..0) restart 2016-03-08 15:19:02.096297 7f82aea5c5c0 10 mon.a@0(probing).paxosservice(mdsmap 0..0) restart 2016-03-08 15:19:02.096298 7f82aea5c5c0 10 mon.a@0(probing).paxosservice(osdmap 0..0) restart 2016-03-08 15:19:02.096298 7f82aea5c5c0 10 mon.a@0(probing).paxosservice(logm 0..0) restart 2016-03-08 15:19:02.096299 7f82aea5c5c0 10 mon.a@0(probing).paxosservice(monmap 0..0) restart 2016-03-08 15:19:02.096300 7f82aea5c5c0 10 mon.a@0(probing).paxosservice(auth 0..0) restart 2016-03-08 15:19:02.096302 7f82aea5c5c0 1 mon.a@0(probing) e0 win_standalone_election 2016-03-08 15:19:02.096314 7f82aea5c5c0 1 mon.a@0(probing).elector(1) init, last seen epoch 1 2016-03-08 15:19:02.096316 7f82aea5c5c0 10 mon.a@0(probing).elector(1) bump_epoch 1 to 2 2016-03-08 15:19:02.102439 7f82aea5c5c0 10 mon.a@0(probing) e0 join_election 2016-03-08 15:19:02.102454 7f82aea5c5c0 10 mon.a@0(probing) e0 _reset 2016-03-08 15:19:02.102455 7f82aea5c5c0 10 mon.a@0(probing) e0 cancel_probe_timeout (none scheduled) 2016-03-08 15:19:02.102456 7f82aea5c5c0 10 mon.a@0(probing) e0 timecheck_finish 2016-03-08 15:19:02.102457 7f82aea5c5c0 15 mon.a@0(probing) e0 health_tick_stop 2016-03-08 15:19:02.102458 7f82aea5c5c0 15 mon.a@0(probing) e0 health_interval_stop 2016-03-08 15:19:02.102459 7f82aea5c5c0 10 mon.a@0(probing) e0 scrub_event_cancel 2016-03-08 15:19:02.102460 7f82aea5c5c0 10 mon.a@0(probing) e0 scrub_reset 2016-03-08 15:19:02.102461 7f82aea5c5c0 10 mon.a@0(probing).paxos(paxos recovering c 0..0) restart -- canceling timeouts 2016-03-08 15:19:02.102464 7f82aea5c5c0 10 mon.a@0(probing).paxosservice(pgmap 0..0) restart 2016-03-08 15:19:02.102466 7f82aea5c5c0 10 mon.a@0(probing).paxosservice(mdsmap 0..0) restart 2016-03-08 15:19:02.102469 7f82aea5c5c0 10 mon.a@0(probing).paxosservice(osdmap 0..0) 
restart 2016-03-08 15:19:02.102470 7f82aea5c5c0 10 mon.a@0(probing).paxosservice(logm 0..0) restart 2016-03-08 15:19:02.102470 7f82aea5c5c0 10 mon.a@0(probing).paxosservice(monmap 0..0) restart 2016-03-08 15:19:02.102471 7f82aea5c5c0 10 mon.a@0(probing).paxosservice(auth 0..0) restart 2016-03-08 15:19:02.102485 7f82aea5c5c0 10 mon.a@0(electing) e0 win_election epoch 2 quorum 0 features 576460752303423487 2016-03-08 15:19:02.102500 7f82aea5c5c0 0 log_channel(cluster) log [INF] : mon.a@0 won leader election with quorum 0 2016-03-08 15:19:02.102524 7f82aea5c5c0 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 1 at 2016-03-08 15:19:02.102501) v1 -- ?+0 0x559cd1560f00 con 0x559cd1477080 2016-03-08 15:19:02.102530 7f82aea5c5c0 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 1 at 2016-03-08 15:19:02.102501) v1 local 2016-03-08 15:19:02.102539 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(monmap 0..0) election_finished 2016-03-08 15:19:02.102541 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(monmap 0..0) _active 2016-03-08 15:19:02.102542 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(monmap 0..0) remove_legacy_versions 2016-03-08 15:19:02.102558 7f82aea5c5c0 7 mon.a@0(leader).paxosservice(monmap 0..0) _active creating new pending 2016-03-08 15:19:02.102561 7f82aea5c5c0 10 mon.a@0(leader).monmap v0 create_pending monmap epoch 1 2016-03-08 15:19:02.102562 7f82aea5c5c0 10 mon.a@0(leader).monmap v0 create_initial using current monmap 2016-03-08 15:19:02.102563 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(monmap 0..0) propose_pending 2016-03-08 15:19:02.102569 7f82aea5c5c0 10 mon.a@0(leader).monmap v0 encode_pending epoch 1 2016-03-08 15:19:02.102581 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 1 at 2016-03-08 15:19:02.102501) v1 ==== 0+0+0 (0 0 0) 0x559cd1560f00 con 0x559cd1477080 2016-03-08 15:19:02.102601 7f82aea5c5c0 10 mon.a@0(leader) e0 prepare_new_fingerprint proposing cluster_fingerprint ba7913fe-b8e6-446b-b668-ca77187d7417 2016-03-08 15:19:02.102605 7f82aea5c5c0 5 mon.a@0(leader).paxos(paxos active c 0..0) queue_pending_finisher 0x559cd14941a0 2016-03-08 15:19:02.102607 7f82aea5c5c0 10 mon.a@0(leader).paxos(paxos active c 0..0) trigger_propose active, proposing now 2016-03-08 15:19:02.102615 7f82aea5c5c0 10 mon.a@0(leader).paxos(paxos active c 0..0) propose_pending 1 411 bytes 2016-03-08 15:19:02.102618 7f82aea5c5c0 10 mon.a@0(leader).paxos(paxos updating c 0..0) begin for 1 411 bytes 2016-03-08 15:19:02.113787 7f82aea5c5c0 10 mon.a@0(leader).paxos(paxos updating c 0..0) commit_start 1 2016-03-08 15:19:02.113835 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(pgmap 0..0) election_finished 2016-03-08 15:19:02.113837 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(pgmap 0..0) _active 2016-03-08 15:19:02.113838 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(pgmap 0..0) remove_legacy_versions 2016-03-08 15:19:02.113850 7f82aea5c5c0 7 mon.a@0(leader).paxosservice(pgmap 0..0) _active creating new pending 2016-03-08 15:19:02.113852 7f82aea5c5c0 10 mon.a@0(leader).pg v0 create_pending v 1 2016-03-08 15:19:02.113853 7f82aea5c5c0 10 mon.a@0(leader).pg v0 create_initial -- creating initial map 2016-03-08 15:19:02.113854 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(pgmap 0..0) propose_pending 2016-03-08 15:19:02.113855 7f82aea5c5c0 10 mon.a@0(leader).pg v0 encode_pending v 1 2016-03-08 15:19:02.113872 7f82aea5c5c0 5 mon.a@0(leader).paxos(paxos writing c 0..0) queue_pending_finisher 0x559cd14941c0 2016-03-08 15:19:02.113877 7f82aea5c5c0 10 
mon.a@0(leader).paxos(paxos writing c 0..0) trigger_propose not active, will propose later 2016-03-08 15:19:02.113878 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(mdsmap 0..0) election_finished 2016-03-08 15:19:02.113879 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(mdsmap 0..0) _active 2016-03-08 15:19:02.113880 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(mdsmap 0..0) remove_legacy_versions 2016-03-08 15:19:02.113887 7f82aea5c5c0 7 mon.a@0(leader).paxosservice(mdsmap 0..0) _active creating new pending 2016-03-08 15:19:02.113890 7f82aea5c5c0 10 mon.a@0(leader).mds e0 create_pending e1 2016-03-08 15:19:02.113892 7f82aea5c5c0 10 mon.a@0(leader).mds e0 create_initial 2016-03-08 15:19:02.113893 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(mdsmap 0..0) propose_pending 2016-03-08 15:19:02.113897 7f82aea5c5c0 10 mon.a@0(leader).mds e0 encode_pending e1 2016-03-08 15:19:02.113907 7f82aea5c5c0 5 mon.a@0(leader).paxos(paxos writing c 0..0) queue_pending_finisher 0x559cd14941d0 2016-03-08 15:19:02.113909 7f82aea5c5c0 10 mon.a@0(leader).paxos(paxos writing c 0..0) trigger_propose not active, will propose later 2016-03-08 15:19:02.113910 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(osdmap 0..0) election_finished 2016-03-08 15:19:02.113911 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(osdmap 0..0) _active 2016-03-08 15:19:02.113911 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(osdmap 0..0) remove_legacy_versions 2016-03-08 15:19:02.113913 7f82aea5c5c0 7 mon.a@0(leader).paxosservice(osdmap 0..0) _active creating new pending 2016-03-08 15:19:02.113922 7f82aea5c5c0 10 mon.a@0(leader).osd e0 create_pending e 1 2016-03-08 15:19:02.113931 7f82aea5c5c0 10 mon.a@0(leader).osd e0 create_initial for c4878902-7748-4a77-afab-07655d3b0201 2016-03-08 15:19:02.114118 7f82aea5c5c0 20 mon.a@0(leader).osd e0 full crc 1072913041 2016-03-08 15:19:02.114125 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(osdmap 0..0) propose_pending 2016-03-08 15:19:02.114126 7f82aea5c5c0 10 mon.a@0(leader).osd e0 encode_pending e 1 2016-03-08 15:19:02.114162 7f82aea5c5c0 20 mon.a@0(leader).osd e0 full_crc 1072913041 inc_crc 465722392 2016-03-08 15:19:02.114182 7f82aea5c5c0 5 mon.a@0(leader).paxos(paxos writing c 0..0) queue_pending_finisher 0x559cd14941f0 2016-03-08 15:19:02.114183 7f82aea5c5c0 10 mon.a@0(leader).paxos(paxos writing c 0..0) trigger_propose not active, will propose later 2016-03-08 15:19:02.114194 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(logm 0..0) election_finished 2016-03-08 15:19:02.114195 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(logm 0..0) _active 2016-03-08 15:19:02.114195 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(logm 0..0) remove_legacy_versions 2016-03-08 15:19:02.114201 7f82aea5c5c0 7 mon.a@0(leader).paxosservice(logm 0..0) _active creating new pending 2016-03-08 15:19:02.114202 7f82aea5c5c0 10 mon.a@0(leader).log v0 create_pending v 1 2016-03-08 15:19:02.114204 7f82aea5c5c0 10 mon.a@0(leader).log v0 create_initial -- creating initial map 2016-03-08 15:19:02.114207 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(logm 0..0) propose_pending 2016-03-08 15:19:02.114208 7f82aea5c5c0 10 mon.a@0(leader).log v0 encode_full log v 0 2016-03-08 15:19:02.114211 7f82aea5c5c0 10 mon.a@0(leader).log v0 encode_pending v1 2016-03-08 15:19:02.114221 7f82aea5c5c0 5 mon.a@0(leader).paxos(paxos writing c 0..0) queue_pending_finisher 0x559cd1494200 2016-03-08 15:19:02.114223 7f82aea5c5c0 10 mon.a@0(leader).paxos(paxos writing c 0..0) trigger_propose not active, will propose later 2016-03-08 15:19:02.114223 7f82aea5c5c0 10 
mon.a@0(leader).paxosservice(auth 0..0) election_finished 2016-03-08 15:19:02.114224 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(auth 0..0) _active 2016-03-08 15:19:02.114225 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(auth 0..0) remove_legacy_versions 2016-03-08 15:19:02.114227 7f82aea5c5c0 7 mon.a@0(leader).paxosservice(auth 0..0) _active creating new pending 2016-03-08 15:19:02.114228 7f82aea5c5c0 10 mon.a@0(leader).auth v0 create_pending v 1 2016-03-08 15:19:02.114233 7f82aea5c5c0 10 mon.a@0(leader).auth v0 create_initial -- creating initial map 2016-03-08 15:19:02.114433 7f82aea5c5c0 10 mon.a@0(leader).auth v0 check_rotate updated rotating 2016-03-08 15:19:02.114441 7f82aea5c5c0 10 mon.a@0(leader).paxosservice(auth 0..0) propose_pending 2016-03-08 15:19:02.114446 7f82aea5c5c0 10 mon.a@0(leader).auth v0 encode_pending v 1 2016-03-08 15:19:02.114455 7f82aea5c5c0 5 mon.a@0(leader).paxos(paxos writing c 0..0) queue_pending_finisher 0x559cd14942d0 2016-03-08 15:19:02.114457 7f82aea5c5c0 10 mon.a@0(leader).paxos(paxos writing c 0..0) trigger_propose not active, will propose later 2016-03-08 15:19:02.114459 7f82aea5c5c0 10 mon.a@0(leader).data_health(2) start_epoch epoch 2 2016-03-08 15:19:02.114464 7f82aea5c5c0 1 mon.a@0(leader) e0 apply_quorum_to_compatset_features enabling new quorum features: compat={},rocompat={},incompat={4=support erasure code pools,5=new-style osdmap encoding,6=support isa/lrc erasure code,7=support shec erasure code} 2016-03-08 15:19:02.127049 7f82aea5c5c0 10 mon.a@0(leader) e0 apply_compatset_features_to_quorum_requirements required_features 9025616074506240 2016-03-08 15:19:02.127079 7f82aea5c5c0 10 mon.a@0(leader) e0 timecheck_finish 2016-03-08 15:19:02.127081 7f82aea5c5c0 10 mon.a@0(leader) e0 resend_routed_requests 2016-03-08 15:19:02.127082 7f82aea5c5c0 10 mon.a@0(leader) e0 register_cluster_logger 2016-03-08 15:19:02.159910 7f82aea5c5c0 10 -- 127.0.0.1:7104/0 wait: waiting for dispatch queue 2016-03-08 15:19:02.159943 7f82a5352700 10 mon.a@0(leader) e0 _ms_dispatch new session 0x559cd14d6a00 MonSession(mon.0 127.0.0.1:7104/0 is open) 2016-03-08 15:19:02.159958 7f82a5352700 5 mon.a@0(leader) e0 _ms_dispatch setting monitor caps on this connection 2016-03-08 15:19:02.159965 7f82a5352700 20 mon.a@0(leader) e0 caps allow * 2016-03-08 15:19:02.159976 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 0..0) dispatch 0x559cd1560f00 log(1 entries from seq 1 at 2016-03-08 15:19:02.102501) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.159982 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 0..0) is_readable = 0 - now=2016-03-08 15:19:02.159982 lease_expire=0.000000 has v0 lc 0 2016-03-08 15:19:02.159985 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 0..0) waiting for paxos -> readable (v0) 2016-03-08 15:19:02.159995 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1560f00 2016-03-08 15:19:02.160012 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 0..0) commit_finish 1 2016-03-08 15:19:02.160062 7f82a6b55700 10 mon.a@0(leader) e0 refresh_from_paxos 2016-03-08 15:19:02.160077 7f82a6b55700 1 mon.a@0(leader).paxosservice(pgmap 0..0) refresh upgraded, format 1 -> 0 2016-03-08 15:19:02.160080 7f82a6b55700 1 mon.a@0(leader).pg v0 on_upgrade discarding in-core PGMap 2016-03-08 15:19:02.160085 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 0..0) refresh 2016-03-08 15:19:02.160092 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 0..0) refresh 2016-03-08 15:19:02.160097 7f82a6b55700 10 
mon.a@0(leader).paxosservice(osdmap 0..0) refresh 2016-03-08 15:19:02.160102 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 0..0) refresh 2016-03-08 15:19:02.160104 7f82a6b55700 10 mon.a@0(leader).log v0 update_from_paxos 2016-03-08 15:19:02.160106 7f82a6b55700 10 mon.a@0(leader).log v0 update_from_paxos version 0 summary v 0 2016-03-08 15:19:02.160113 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:02.160115 7f82a6b55700 10 mon.a@0(leader).monmap v0 update_from_paxos version 1, my v 0 2016-03-08 15:19:02.160116 7f82a6b55700 10 mon.a@0(leader).monmap v0 signaling that we need a bootstrap 2016-03-08 15:19:02.160122 7f82a6b55700 10 mon.a@0(leader).monmap v0 update_from_paxos got 1 2016-03-08 15:19:02.167163 7f82a6b55700 1 mon.a@0(leader).paxosservice(auth 0..0) refresh upgraded, format 1 -> 0 2016-03-08 15:19:02.167168 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 0..0) refresh 2016-03-08 15:19:02.167169 7f82a6b55700 10 mon.a@0(leader).auth v0 update_from_paxos 2016-03-08 15:19:02.167171 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 0..0) post_refresh 2016-03-08 15:19:02.167172 7f82a6b55700 10 mon.a@0(leader).pg v0 post_paxos_update 2016-03-08 15:19:02.167173 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 0..0) post_refresh 2016-03-08 15:19:02.167174 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 0..0) post_refresh 2016-03-08 15:19:02.167174 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 0..0) post_refresh 2016-03-08 15:19:02.167175 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:02.167176 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 0..0) post_refresh 2016-03-08 15:19:02.167178 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..1) doing requested bootstrap 2016-03-08 15:19:02.167184 7f82a6b55700 10 mon.a@0(leader) e1 bootstrap 2016-03-08 15:19:02.167185 7f82a6b55700 10 mon.a@0(leader) e1 sync_reset_requester 2016-03-08 15:19:02.167186 7f82a6b55700 10 mon.a@0(leader) e1 unregister_cluster_logger 2016-03-08 15:19:02.167188 7f82a6b55700 10 mon.a@0(leader) e1 cancel_probe_timeout (none scheduled) 2016-03-08 15:19:02.167189 7f82a6b55700 10 mon.a@0(probing) e1 _reset 2016-03-08 15:19:02.167190 7f82a6b55700 10 mon.a@0(probing) e1 cancel_probe_timeout (none scheduled) 2016-03-08 15:19:02.167191 7f82a6b55700 10 mon.a@0(probing) e1 timecheck_finish 2016-03-08 15:19:02.167192 7f82a6b55700 15 mon.a@0(probing) e1 health_tick_stop 2016-03-08 15:19:02.167193 7f82a6b55700 15 mon.a@0(probing) e1 health_interval_stop 2016-03-08 15:19:02.167194 7f82a6b55700 10 mon.a@0(probing) e1 scrub_event_cancel 2016-03-08 15:19:02.167196 7f82a6b55700 10 mon.a@0(probing) e1 scrub_reset 2016-03-08 15:19:02.167196 7f82a6b55700 10 mon.a@0(probing).paxos(paxos refresh c 1..1) restart -- canceling timeouts 2016-03-08 15:19:02.167234 7f82a6b55700 10 mon.a@0(probing).paxosservice(pgmap 0..0) restart 2016-03-08 15:19:02.167236 7f82a6b55700 10 mon.a@0(probing).paxosservice(mdsmap 0..0) restart 2016-03-08 15:19:02.167237 7f82a6b55700 10 mon.a@0(probing).paxosservice(osdmap 0..0) restart 2016-03-08 15:19:02.167237 7f82a6b55700 10 mon.a@0(probing).paxosservice(logm 0..0) restart 2016-03-08 15:19:02.167250 7f82a6b55700 10 mon.a@0(probing).paxosservice(logm 0..0) dispatch 0x559cd1560f00 log(1 entries from seq 1 at 2016-03-08 15:19:02.102501) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.167256 7f82a6b55700 5 mon.a@0(probing).paxos(paxos recovering c 1..1) is_readable = 0 - now=2016-03-08 15:19:02.167256 
lease_expire=0.000000 has v0 lc 1 2016-03-08 15:19:02.167260 7f82a6b55700 10 mon.a@0(probing).paxosservice(logm 0..0) waiting for paxos -> readable (v0) 2016-03-08 15:19:02.167268 7f82a6b55700 10 mon.a@0(probing).paxosservice(monmap 1..1) restart 2016-03-08 15:19:02.167268 7f82a6b55700 10 mon.a@0(probing).paxosservice(auth 0..0) restart 2016-03-08 15:19:02.167270 7f82a6b55700 1 mon.a@0(probing) e1 win_standalone_election 2016-03-08 15:19:02.167277 7f82a6b55700 1 mon.a@0(probing).elector(2) init, last seen epoch 2 2016-03-08 15:19:02.167279 7f82a6b55700 10 mon.a@0(probing).elector(2) bump_epoch 2 to 3 2016-03-08 15:19:02.173651 7f82a6b55700 10 mon.a@0(probing) e1 join_election 2016-03-08 15:19:02.173665 7f82a6b55700 10 mon.a@0(probing) e1 _reset 2016-03-08 15:19:02.173665 7f82a6b55700 10 mon.a@0(probing) e1 cancel_probe_timeout (none scheduled) 2016-03-08 15:19:02.173666 7f82a6b55700 10 mon.a@0(probing) e1 timecheck_finish 2016-03-08 15:19:02.173667 7f82a6b55700 15 mon.a@0(probing) e1 health_tick_stop 2016-03-08 15:19:02.173668 7f82a6b55700 15 mon.a@0(probing) e1 health_interval_stop 2016-03-08 15:19:02.173668 7f82a6b55700 10 mon.a@0(probing) e1 scrub_event_cancel 2016-03-08 15:19:02.173669 7f82a6b55700 10 mon.a@0(probing) e1 scrub_reset 2016-03-08 15:19:02.173670 7f82a6b55700 10 mon.a@0(probing).paxos(paxos recovering c 1..1) restart -- canceling timeouts 2016-03-08 15:19:02.173673 7f82a6b55700 10 mon.a@0(probing).paxosservice(pgmap 0..0) restart 2016-03-08 15:19:02.173675 7f82a6b55700 10 mon.a@0(probing).paxosservice(mdsmap 0..0) restart 2016-03-08 15:19:02.173676 7f82a6b55700 10 mon.a@0(probing).paxosservice(osdmap 0..0) restart 2016-03-08 15:19:02.173677 7f82a6b55700 10 mon.a@0(probing).paxosservice(logm 0..0) restart 2016-03-08 15:19:02.173708 7f82a6b55700 10 mon.a@0(probing).paxosservice(logm 0..0) dispatch 0x559cd1560f00 log(1 entries from seq 1 at 2016-03-08 15:19:02.102501) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.173715 7f82a6b55700 5 mon.a@0(probing).paxos(paxos recovering c 1..1) is_readable = 0 - now=2016-03-08 15:19:02.173716 lease_expire=0.000000 has v0 lc 1 2016-03-08 15:19:02.173718 7f82a6b55700 10 mon.a@0(probing).paxosservice(logm 0..0) waiting for paxos -> readable (v0) 2016-03-08 15:19:02.173729 7f82a6b55700 10 mon.a@0(probing).paxosservice(monmap 1..1) restart 2016-03-08 15:19:02.173730 7f82a6b55700 10 mon.a@0(probing).paxosservice(auth 0..0) restart 2016-03-08 15:19:02.173734 7f82a6b55700 10 mon.a@0(electing) e1 win_election epoch 3 quorum 0 features 576460752303423487 2016-03-08 15:19:02.173742 7f82a6b55700 0 log_channel(cluster) log [INF] : mon.a@0 won leader election with quorum 0 2016-03-08 15:19:02.173756 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 2 at 2016-03-08 15:19:02.173744) v1 -- ?+0 0x559cd1561900 con 0x559cd1477080 2016-03-08 15:19:02.173760 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 2 at 2016-03-08 15:19:02.173744) v1 local 2016-03-08 15:19:02.173767 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) election_finished 2016-03-08 15:19:02.173769 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) _active 2016-03-08 15:19:02.173770 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) remove_legacy_versions 2016-03-08 15:19:02.173785 7f82a6b55700 7 mon.a@0(leader).paxosservice(monmap 1..1) _active creating new pending 2016-03-08 15:19:02.173789 7f82a6b55700 10 mon.a@0(leader).monmap v1 create_pending monmap epoch 2 2016-03-08 15:19:02.173790 
7f82a6b55700 10 mon.a@0(leader).monmap v1 noting that i was, once, part of an active quorum. 2016-03-08 15:19:02.173798 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 2 at 2016-03-08 15:19:02.173744) v1 ==== 0+0+0 (0 0 0) 0x559cd1561900 con 0x559cd1477080 2016-03-08 15:19:02.179871 7f82a6b55700 0 log_channel(cluster) log [INF] : monmap e1: 1 mons at {a=127.0.0.1:7104/0} 2016-03-08 15:19:02.179893 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 3 at 2016-03-08 15:19:02.179877) v1 -- ?+0 0x559cd1561b80 con 0x559cd1477080 2016-03-08 15:19:02.179899 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 3 at 2016-03-08 15:19:02.179877) v1 local 2016-03-08 15:19:02.179905 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 0..0) election_finished 2016-03-08 15:19:02.179908 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 0..0) _active 2016-03-08 15:19:02.179909 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 0..0) remove_legacy_versions 2016-03-08 15:19:02.179920 7f82a6b55700 7 mon.a@0(leader).paxosservice(pgmap 0..0) _active creating new pending 2016-03-08 15:19:02.179922 7f82a6b55700 10 mon.a@0(leader).pg v0 create_pending v 1 2016-03-08 15:19:02.179923 7f82a6b55700 10 mon.a@0(leader).pg v0 create_initial -- creating initial map 2016-03-08 15:19:02.179924 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 0..0) propose_pending 2016-03-08 15:19:02.179925 7f82a6b55700 10 mon.a@0(leader).pg v0 encode_pending v 1 2016-03-08 15:19:02.179937 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..1) queue_pending_finisher 0x559cd14941d0 2016-03-08 15:19:02.179939 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..1) trigger_propose active, proposing now 2016-03-08 15:19:02.179960 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..1) propose_pending 2 414 bytes 2016-03-08 15:19:02.179962 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..1) begin for 2 414 bytes 2016-03-08 15:19:02.187055 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..1) commit_start 2 2016-03-08 15:19:02.187088 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 0..0) election_finished 2016-03-08 15:19:02.187091 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 0..0) _active 2016-03-08 15:19:02.187092 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 0..0) remove_legacy_versions 2016-03-08 15:19:02.187106 7f82a6b55700 7 mon.a@0(leader).paxosservice(mdsmap 0..0) _active creating new pending 2016-03-08 15:19:02.187114 7f82a6b55700 10 mon.a@0(leader).mds e0 create_pending e1 2016-03-08 15:19:02.187115 7f82a6b55700 10 mon.a@0(leader).mds e0 create_initial 2016-03-08 15:19:02.187116 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 0..0) propose_pending 2016-03-08 15:19:02.187121 7f82a6b55700 10 mon.a@0(leader).mds e0 encode_pending e1 2016-03-08 15:19:02.187129 7f82a6b55700 5 mon.a@0(leader).paxos(paxos writing c 1..1) queue_pending_finisher 0x559cd1494200 2016-03-08 15:19:02.187131 7f82a6b55700 10 mon.a@0(leader).paxos(paxos writing c 1..1) trigger_propose not active, will propose later 2016-03-08 15:19:02.187132 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 0..0) election_finished 2016-03-08 15:19:02.187133 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 0..0) _active 2016-03-08 15:19:02.187134 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 0..0) remove_legacy_versions 2016-03-08 15:19:02.187136 7f82a6b55700 7 mon.a@0(leader).paxosservice(osdmap 0..0) _active creating new pending 
2016-03-08 15:19:02.187141 7f82a6b55700 10 mon.a@0(leader).osd e0 create_pending e 1 2016-03-08 15:19:02.187150 7f82a6b55700 10 mon.a@0(leader).osd e0 create_initial for c4878902-7748-4a77-afab-07655d3b0201 2016-03-08 15:19:02.187242 7f82a6b55700 20 mon.a@0(leader).osd e0 full crc 3104298101 2016-03-08 15:19:02.187247 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 0..0) propose_pending 2016-03-08 15:19:02.187259 7f82a6b55700 10 mon.a@0(leader).osd e0 encode_pending e 1 2016-03-08 15:19:02.187304 7f82a6b55700 20 mon.a@0(leader).osd e0 full_crc 3104298101 inc_crc 165368911 2016-03-08 15:19:02.187325 7f82a6b55700 5 mon.a@0(leader).paxos(paxos writing c 1..1) queue_pending_finisher 0x559cd1494390 2016-03-08 15:19:02.187327 7f82a6b55700 10 mon.a@0(leader).paxos(paxos writing c 1..1) trigger_propose not active, will propose later 2016-03-08 15:19:02.187328 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 0..0) election_finished 2016-03-08 15:19:02.187339 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 0..0) dispatch 0x559cd1560f00 log(1 entries from seq 1 at 2016-03-08 15:19:02.102501) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.187346 7f82a6b55700 5 mon.a@0(leader).paxos(paxos writing c 1..1) is_readable = 1 - now=2016-03-08 15:19:02.187347 lease_expire=0.000000 has v0 lc 1 2016-03-08 15:19:02.187349 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 0..0) waiting for paxos -> readable (v0) 2016-03-08 15:19:02.187357 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 0..0) _active 2016-03-08 15:19:02.187358 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 0..0) remove_legacy_versions 2016-03-08 15:19:02.187368 7f82a6b55700 7 mon.a@0(leader).paxosservice(logm 0..0) _active creating new pending 2016-03-08 15:19:02.187371 7f82a6b55700 10 mon.a@0(leader).log v0 create_pending v 1 2016-03-08 15:19:02.187372 7f82a6b55700 10 mon.a@0(leader).log v0 create_initial -- creating initial map 2016-03-08 15:19:02.187374 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 0..0) propose_pending 2016-03-08 15:19:02.187375 7f82a6b55700 10 mon.a@0(leader).log v0 encode_full log v 0 2016-03-08 15:19:02.187379 7f82a6b55700 10 mon.a@0(leader).log v0 encode_pending v1 2016-03-08 15:19:02.187385 7f82a6b55700 5 mon.a@0(leader).paxos(paxos writing c 1..1) queue_pending_finisher 0x559cd14943a0 2016-03-08 15:19:02.187386 7f82a6b55700 10 mon.a@0(leader).paxos(paxos writing c 1..1) trigger_propose not active, will propose later 2016-03-08 15:19:02.187388 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 0..0) election_finished 2016-03-08 15:19:02.187388 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 0..0) _active 2016-03-08 15:19:02.187389 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 0..0) remove_legacy_versions 2016-03-08 15:19:02.187392 7f82a6b55700 7 mon.a@0(leader).paxosservice(auth 0..0) _active creating new pending 2016-03-08 15:19:02.187395 7f82a6b55700 10 mon.a@0(leader).auth v0 create_pending v 1 2016-03-08 15:19:02.187396 7f82a6b55700 10 mon.a@0(leader).auth v0 create_initial -- creating initial map 2016-03-08 15:19:02.187402 7f82a6b55700 10 mon.a@0(leader).auth v0 check_rotate updated rotating 2016-03-08 15:19:02.187407 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 0..0) propose_pending 2016-03-08 15:19:02.187412 7f82a6b55700 10 mon.a@0(leader).auth v0 encode_pending v 1 2016-03-08 15:19:02.187422 7f82a6b55700 5 mon.a@0(leader).paxos(paxos writing c 1..1) queue_pending_finisher 0x559cd14943b0 2016-03-08 15:19:02.187423 7f82a6b55700 10 mon.a@0(leader).paxos(paxos writing c 
1..1) trigger_propose not active, will propose later 2016-03-08 15:19:02.187425 7f82a6b55700 10 mon.a@0(leader).data_health(3) start_epoch epoch 3 2016-03-08 15:19:02.187429 7f82a6b55700 10 mon.a@0(leader) e1 timecheck_finish 2016-03-08 15:19:02.187431 7f82a6b55700 10 mon.a@0(leader) e1 resend_routed_requests 2016-03-08 15:19:02.187434 7f82a6b55700 10 mon.a@0(leader) e1 register_cluster_logger 2016-03-08 15:19:02.228393 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.228404 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.228423 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 0..0) dispatch 0x559cd1561900 log(1 entries from seq 2 at 2016-03-08 15:19:02.173744) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.228432 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 1..1) is_readable = 1 - now=2016-03-08 15:19:02.228433 lease_expire=0.000000 has v0 lc 1 2016-03-08 15:19:02.228438 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 0..0) waiting for paxos -> readable (v0) 2016-03-08 15:19:02.228455 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1561900 2016-03-08 15:19:02.228472 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 3 at 2016-03-08 15:19:02.179877) v1 ==== 0+0+0 (0 0 0) 0x559cd1561b80 con 0x559cd1477080 2016-03-08 15:19:02.228560 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.228567 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.228581 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 0..0) dispatch 0x559cd1561b80 log(1 entries from seq 3 at 2016-03-08 15:19:02.179877) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.228587 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 1..1) is_readable = 1 - now=2016-03-08 15:19:02.228588 lease_expire=0.000000 has v0 lc 1 2016-03-08 15:19:02.228591 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 0..0) waiting for paxos -> readable (v0) 2016-03-08 15:19:02.228604 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1561b80 2016-03-08 15:19:02.234733 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..1) commit_finish 2 2016-03-08 15:19:02.234777 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:02.234795 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..1) refresh 2016-03-08 15:19:02.234798 7f82a6b55700 10 mon.a@0(leader).pg v0 update_from_paxos v0, read_full 2016-03-08 15:19:02.234799 7f82a6b55700 10 mon.a@0(leader).pg v0 read_pgmap_meta 2016-03-08 15:19:02.234816 7f82a6b55700 10 mon.a@0(leader).pg v1 update_logger 2016-03-08 15:19:02.234826 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 0..0) refresh 2016-03-08 15:19:02.234830 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 0..0) refresh 2016-03-08 15:19:02.234835 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 0..0) refresh 2016-03-08 15:19:02.234836 7f82a6b55700 10 mon.a@0(leader).log v0 update_from_paxos 2016-03-08 15:19:02.234837 7f82a6b55700 10 mon.a@0(leader).log v0 update_from_paxos version 0 summary v 0 2016-03-08 15:19:02.234844 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:02.234850 7f82a6b55700 1 mon.a@0(leader).paxosservice(auth 0..0) refresh upgraded, format 1 -> 0 2016-03-08 15:19:02.234852 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 0..0) refresh 2016-03-08 15:19:02.234853 
7f82a6b55700 10 mon.a@0(leader).auth v0 update_from_paxos 2016-03-08 15:19:02.234855 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..1) post_refresh 2016-03-08 15:19:02.234856 7f82a6b55700 10 mon.a@0(leader).pg v1 post_paxos_update 2016-03-08 15:19:02.234856 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 0..0) post_refresh 2016-03-08 15:19:02.234857 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 0..0) post_refresh 2016-03-08 15:19:02.234857 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 0..0) post_refresh 2016-03-08 15:19:02.234858 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:02.234859 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 0..0) post_refresh 2016-03-08 15:19:02.234861 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..2) commit_proposal 2016-03-08 15:19:02.234863 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..1) _active - not active 2016-03-08 15:19:02.234864 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..2) finish_round 2016-03-08 15:19:02.234865 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..2) finish_round waiting_for_acting 2016-03-08 15:19:02.234866 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..1) _active 2016-03-08 15:19:02.234867 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..1) remove_legacy_versions 2016-03-08 15:19:02.234876 7f82a6b55700 7 mon.a@0(leader).paxosservice(pgmap 1..1) _active creating new pending 2016-03-08 15:19:02.234878 7f82a6b55700 10 mon.a@0(leader).pg v1 create_pending v 2 2016-03-08 15:19:02.234879 7f82a6b55700 10 mon.a@0(leader).pg v1 check_osd_map already seen 0 >= 0 2016-03-08 15:19:02.234880 7f82a6b55700 10 mon.a@0(leader).pg v1 update_logger 2016-03-08 15:19:02.234889 7f82a6b55700 0 log_channel(cluster) log [INF] : pgmap v1: 0 pgs: ; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:02.234907 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 4 at 2016-03-08 15:19:02.234891) v1 -- ?+0 0x559cd1561e00 con 0x559cd1477080 2016-03-08 15:19:02.234913 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 4 at 2016-03-08 15:19:02.234891) v1 local 2016-03-08 15:19:02.234919 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..2) finish_round waiting_for_readable 2016-03-08 15:19:02.234921 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..2) finish_round waiting_for_writeable 2016-03-08 15:19:02.234921 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..2) finish_round done w/ waiters, state 1 2016-03-08 15:19:02.234947 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..2) propose_pending 3 3555 bytes 2016-03-08 15:19:02.234963 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..2) begin for 3 3555 bytes 2016-03-08 15:19:02.234961 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 4 at 2016-03-08 15:19:02.234891) v1 ==== 0+0+0 (0 0 0) 0x559cd1561e00 con 0x559cd1477080 2016-03-08 15:19:02.241200 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..2) commit_start 3 2016-03-08 15:19:02.241297 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.241306 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.241329 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 0..0) dispatch 0x559cd1561e00 log(1 entries from seq 4 at 2016-03-08 15:19:02.234891) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.241337 7f82a5352700 5 
mon.a@0(leader).paxos(paxos writing c 1..2) is_readable = 1 - now=2016-03-08 15:19:02.241337 lease_expire=0.000000 has v0 lc 2 2016-03-08 15:19:02.241342 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 0..0) waiting for paxos -> readable (v0) 2016-03-08 15:19:02.241363 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1561e00 2016-03-08 15:19:02.248395 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..2) commit_finish 3 2016-03-08 15:19:02.248429 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:02.248440 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..1) refresh 2016-03-08 15:19:02.248446 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:02.248448 7f82a6b55700 10 mon.a@0(leader).mds e0 update_from_paxos version 1, my e 0 2016-03-08 15:19:02.248455 7f82a6b55700 10 mon.a@0(leader).mds e0 update_from_paxos got 1 2016-03-08 15:19:02.248464 7f82a6b55700 4 mon.a@0(leader).mds e1 new map 2016-03-08 15:19:02.248465 7f82a6b55700 0 mon.a@0(leader).mds e1 print_map epoch 1 flags 0 created 0.000000 modified 2016-03-08 15:19:02.187123 tableserver 0 root 0 session_timeout 0 session_autoclose 0 max_file_size 0 last_failure 0 last_failure_osd_epoch 0 compat compat={},rocompat={},incompat={} max_mds 0 in up {} failed damaged stopped data_pools metadata_pool 0 inline_data disabled 2016-03-08 15:19:02.248476 7f82a6b55700 10 mon.a@0(leader).mds e1 update_logger 2016-03-08 15:19:02.248482 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..1) refresh 2016-03-08 15:19:02.248484 7f82a6b55700 15 mon.a@0(leader).osd e0 update_from_paxos paxos e 1, my e 0 2016-03-08 15:19:02.248496 7f82a6b55700 7 mon.a@0(leader).osd e0 update_from_paxos applying incremental 1 2016-03-08 15:19:02.248549 7f82a6b55700 1 mon.a@0(leader).osd e1 e1: 0 osds: 0 up, 0 in 2016-03-08 15:19:02.254831 7f82a6b55700 5 mon.a@0(leader).paxos(paxos refresh c 1..3) is_readable = 0 - now=2016-03-08 15:19:02.254832 lease_expire=0.000000 has v0 lc 3 2016-03-08 15:19:02.254838 7f82a6b55700 10 mon.a@0(leader).pg v1 check_osd_map -- osdmap not readable, waiting 2016-03-08 15:19:02.254840 7f82a6b55700 10 mon.a@0(leader).osd e1 check_subs 2016-03-08 15:19:02.254841 7f82a6b55700 10 mon.a@0(leader).osd e1 share_map_with_random_osd no up osds, don't share with anyone 2016-03-08 15:19:02.254852 7f82a6b55700 10 mon.a@0(leader).osd e1 update_logger 2016-03-08 15:19:02.254855 7f82a6b55700 0 mon.a@0(leader).osd e1 crush map has features 1107558400, adjusting msgr requires 2016-03-08 15:19:02.254856 7f82a6b55700 0 mon.a@0(leader).osd e1 crush map has features 1107558400, adjusting msgr requires 2016-03-08 15:19:02.254858 7f82a6b55700 0 mon.a@0(leader).osd e1 crush map has features 1107558400, adjusting msgr requires 2016-03-08 15:19:02.254861 7f82a6b55700 0 mon.a@0(leader).osd e1 crush map has features 1107558400, adjusting msgr requires 2016-03-08 15:19:02.254872 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..1) refresh 2016-03-08 15:19:02.254873 7f82a6b55700 10 mon.a@0(leader).log v1 update_from_paxos 2016-03-08 15:19:02.254874 7f82a6b55700 10 mon.a@0(leader).log v1 update_from_paxos version 1 summary v 0 2016-03-08 15:19:02.254877 7f82a6b55700 10 mon.a@0(leader).log v1 update_from_paxos latest full 0 2016-03-08 15:19:02.254881 7f82a6b55700 7 mon.a@0(leader).log v1 update_from_paxos applying incremental log 1 2016-03-08 15:19:02.187372 unknown.0 :/0 0 : [INF] mkfs c4878902-7748-4a77-afab-07655d3b0201 2016-03-08 15:19:02.254892 7f82a6b55700 20 mon.a@0(leader).log v1 
update_from_paxos logging for channel 'cluster' to file 'testdir/osd-crush/log' 2016-03-08 15:19:02.254904 7f82a6b55700 15 mon.a@0(leader).log v1 update_from_paxos logging for 1 channels 2016-03-08 15:19:02.254906 7f82a6b55700 15 mon.a@0(leader).log v1 update_from_paxos channel 'cluster' logging 94 bytes 2016-03-08 15:19:02.255034 7f82a6b55700 10 mon.a@0(leader).log v1 check_subs 2016-03-08 15:19:02.255058 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:02.255063 7f82a6b55700 1 mon.a@0(leader).paxosservice(auth 1..1) refresh upgraded, format 0 -> 1 2016-03-08 15:19:02.255064 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..1) refresh 2016-03-08 15:19:02.255065 7f82a6b55700 10 mon.a@0(leader).auth v1 update_from_paxos 2016-03-08 15:19:02.255069 7f82a6b55700 10 mon.a@0(leader).auth v1 update_from_paxos version 1 keys ver 0 latest 0 2016-03-08 15:19:02.255070 7f82a6b55700 10 mon.a@0(leader).auth v1 update_from_paxos key server version 0 2016-03-08 15:19:02.255074 7f82a6b55700 20 mon.a@0(leader).auth v1 update_from_paxos walking through version 1 len 667 2016-03-08 15:19:02.255197 7f82a6b55700 10 mon.a@0(leader).auth v1 update_from_paxos() last_allocated_id=4096 max_global_id=4096 format_version 1 2016-03-08 15:19:02.255200 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..1) post_refresh 2016-03-08 15:19:02.255201 7f82a6b55700 10 mon.a@0(leader).pg v1 post_paxos_update 2016-03-08 15:19:02.255202 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:02.255203 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..1) post_refresh 2016-03-08 15:19:02.255203 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..1) post_refresh 2016-03-08 15:19:02.255203 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:02.255214 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..1) post_refresh 2016-03-08 15:19:02.255215 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..3) commit_proposal 2016-03-08 15:19:02.255217 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) _active - not active 2016-03-08 15:19:02.255218 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..1) _active - not active 2016-03-08 15:19:02.255218 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..1) _active - not active 2016-03-08 15:19:02.255219 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..1) _active - not active 2016-03-08 15:19:02.255220 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..3) finish_round 2016-03-08 15:19:02.255221 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..3) finish_round waiting_for_acting 2016-03-08 15:19:02.255222 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) _active 2016-03-08 15:19:02.255223 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) remove_legacy_versions 2016-03-08 15:19:02.255228 7f82a6b55700 7 mon.a@0(leader).paxosservice(mdsmap 1..1) _active creating new pending 2016-03-08 15:19:02.255230 7f82a6b55700 10 mon.a@0(leader).mds e1 create_pending e2 2016-03-08 15:19:02.255231 7f82a6b55700 10 mon.a@0(leader).mds e1 update_logger 2016-03-08 15:19:02.255235 7f82a6b55700 0 log_channel(cluster) log [INF] : mdsmap e1: 0/0/0 up 2016-03-08 15:19:02.255254 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 5 at 2016-03-08 15:19:02.255237) v1 -- ?+0 0x559cd1562080 con 0x559cd1477080 2016-03-08 15:19:02.255259 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 5 at 2016-03-08 15:19:02.255237) v1 
local 2016-03-08 15:19:02.255263 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..1) _active 2016-03-08 15:19:02.255264 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..1) remove_legacy_versions 2016-03-08 15:19:02.255268 7f82a6b55700 7 mon.a@0(leader).paxosservice(osdmap 1..1) _active creating new pending 2016-03-08 15:19:02.255270 7f82a6b55700 10 mon.a@0(leader).osd e1 create_pending e 2 2016-03-08 15:19:02.255282 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..3) is_readable = 1 - now=2016-03-08 15:19:02.255282 lease_expire=0.000000 has v0 lc 3 2016-03-08 15:19:02.255286 7f82a6b55700 10 mon.a@0(leader).pg v1 check_osd_map applying osdmap e1 to pg_map 2016-03-08 15:19:02.255290 7f82a6b55700 10 mon.a@0(leader).pg v1 map_pg_creates to 0 pgs, osdmap epoch 1 2016-03-08 15:19:02.255291 7f82a6b55700 10 mon.a@0(leader).pg v1 register_new_pgs checking pg pools for osdmap epoch 1, last_pg_scan 0 2016-03-08 15:19:02.255293 7f82a6b55700 10 mon.a@0(leader).pg v1 register_new_pgs scanning pool 0 replicated size 3 min_size 2 crush_ruleset 0 object_hash rjenkins pg_num 64 pgp_num 64 last_change 1 flags hashpspool stripe_width 0 2016-03-08 15:19:02.255299 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.0 primary -1 acting [] 2016-03-08 15:19:02.255292 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 5 at 2016-03-08 15:19:02.255237) v1 ==== 0+0+0 (0 0 0) 0x559cd1562080 con 0x559cd1477080 2016-03-08 15:19:02.255302 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.1 primary -1 acting [] 2016-03-08 15:19:02.255304 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.2 primary -1 acting [] 2016-03-08 15:19:02.255306 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.3 primary -1 acting [] 2016-03-08 15:19:02.255307 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.4 primary -1 acting [] 2016-03-08 15:19:02.255308 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.5 primary -1 acting [] 2016-03-08 15:19:02.255310 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.6 primary -1 acting [] 2016-03-08 15:19:02.255311 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.7 primary -1 acting [] 2016-03-08 15:19:02.255312 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.8 primary -1 acting [] 2016-03-08 15:19:02.255313 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.9 primary -1 acting [] 2016-03-08 15:19:02.255314 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.a primary -1 acting [] 2016-03-08 15:19:02.255315 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.b primary -1 acting [] 2016-03-08 15:19:02.255316 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.c primary -1 acting [] 2016-03-08 15:19:02.255317 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.d primary -1 acting [] 2016-03-08 15:19:02.255319 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.e primary -1 acting [] 2016-03-08 15:19:02.255320 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.f primary -1 acting [] 2016-03-08 15:19:02.255321 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.10 primary -1 acting [] 2016-03-08 15:19:02.255323 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.11 primary -1 acting [] 2016-03-08 15:19:02.255324 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.12 primary -1 acting [] 2016-03-08 
15:19:02.255324 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.13 primary -1 acting [] 2016-03-08 15:19:02.255330 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.14 primary -1 acting [] 2016-03-08 15:19:02.255331 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.15 primary -1 acting [] 2016-03-08 15:19:02.255332 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.16 primary -1 acting [] 2016-03-08 15:19:02.255333 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.17 primary -1 acting [] 2016-03-08 15:19:02.255334 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.18 primary -1 acting [] 2016-03-08 15:19:02.255335 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.19 primary -1 acting [] 2016-03-08 15:19:02.255336 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.1a primary -1 acting [] 2016-03-08 15:19:02.255337 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.1b primary -1 acting [] 2016-03-08 15:19:02.255338 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.1c primary -1 acting [] 2016-03-08 15:19:02.255339 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.1d primary -1 acting [] 2016-03-08 15:19:02.255340 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.1e primary -1 acting [] 2016-03-08 15:19:02.255341 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.1f primary -1 acting [] 2016-03-08 15:19:02.255342 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.20 primary -1 acting [] 2016-03-08 15:19:02.255343 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.21 primary -1 acting [] 2016-03-08 15:19:02.255344 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.22 primary -1 acting [] 2016-03-08 15:19:02.255345 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.23 primary -1 acting [] 2016-03-08 15:19:02.255346 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.24 primary -1 acting [] 2016-03-08 15:19:02.255347 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.25 primary -1 acting [] 2016-03-08 15:19:02.255348 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.26 primary -1 acting [] 2016-03-08 15:19:02.255349 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.27 primary -1 acting [] 2016-03-08 15:19:02.255350 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.28 primary -1 acting [] 2016-03-08 15:19:02.255351 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.29 primary -1 acting [] 2016-03-08 15:19:02.255352 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.2a primary -1 acting [] 2016-03-08 15:19:02.255357 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.2b primary -1 acting [] 2016-03-08 15:19:02.255358 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.2c primary -1 acting [] 2016-03-08 15:19:02.255359 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.2d primary -1 acting [] 2016-03-08 15:19:02.255360 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.2e primary -1 acting [] 2016-03-08 15:19:02.255361 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.2f primary -1 acting [] 2016-03-08 15:19:02.255362 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.30 primary -1 acting [] 2016-03-08 15:19:02.255363 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.31 primary -1 acting [] 
2016-03-08 15:19:02.255364 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.32 primary -1 acting [] 2016-03-08 15:19:02.255365 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.33 primary -1 acting [] 2016-03-08 15:19:02.255366 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.34 primary -1 acting [] 2016-03-08 15:19:02.255367 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.35 primary -1 acting [] 2016-03-08 15:19:02.255368 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.36 primary -1 acting [] 2016-03-08 15:19:02.255369 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.37 primary -1 acting [] 2016-03-08 15:19:02.255370 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.38 primary -1 acting [] 2016-03-08 15:19:02.255375 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.39 primary -1 acting [] 2016-03-08 15:19:02.255377 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.3a primary -1 acting [] 2016-03-08 15:19:02.255378 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.3b primary -1 acting [] 2016-03-08 15:19:02.255378 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.3c primary -1 acting [] 2016-03-08 15:19:02.255379 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.3d primary -1 acting [] 2016-03-08 15:19:02.255381 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.3e primary -1 acting [] 2016-03-08 15:19:02.255382 7f82a6b55700 10 mon.a@0(leader).pg v1 register_pg will create 0.3f primary -1 acting [] 2016-03-08 15:19:02.255382 7f82a6b55700 10 mon.a@0(leader).pg v1 register_new_pgs registered 64 new pgs, removed 0 uncreated pgs 2016-03-08 15:19:02.255384 7f82a6b55700 10 mon.a@0(leader).pg v1 check_down_pgs 2016-03-08 15:19:02.255384 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..1) propose_pending 2016-03-08 15:19:02.255386 7f82a6b55700 10 mon.a@0(leader).pg v1 encode_pending v 2 2016-03-08 15:19:02.255674 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..3) queue_pending_finisher 0x559cd14941b0 2016-03-08 15:19:02.255677 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..3) trigger_propose active, proposing now 2016-03-08 15:19:02.255708 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..3) propose_pending 4 37898 bytes 2016-03-08 15:19:02.255709 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..3) begin for 4 37898 bytes 2016-03-08 15:19:02.267070 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..3) commit_start 4 2016-03-08 15:19:02.267165 7f82a6b55700 10 mon.a@0(leader).osd e1 update_logger 2016-03-08 15:19:02.267182 7f82a6b55700 0 log_channel(cluster) log [INF] : osdmap e1: 0 osds: 0 up, 0 in 2016-03-08 15:19:02.267204 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 6 at 2016-03-08 15:19:02.267183) v1 -- ?+0 0x559cd1562300 con 0x559cd1477080 2016-03-08 15:19:02.267210 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 6 at 2016-03-08 15:19:02.267183) v1 local 2016-03-08 15:19:02.267216 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..1) _active 2016-03-08 15:19:02.267218 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..1) remove_legacy_versions 2016-03-08 15:19:02.267232 7f82a6b55700 7 mon.a@0(leader).paxosservice(logm 1..1) _active creating new pending 2016-03-08 15:19:02.267237 7f82a6b55700 10 mon.a@0(leader).log v1 create_pending v 2 2016-03-08 15:19:02.267246 7f82a6b55700 10 
mon.a@0(leader).paxosservice(logm 1..1) dispatch 0x559cd1560f00 log(1 entries from seq 1 at 2016-03-08 15:19:02.102501) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.267252 7f82a6b55700 5 mon.a@0(leader).paxos(paxos writing c 1..3) is_readable = 1 - now=2016-03-08 15:19:02.267252 lease_expire=0.000000 has v0 lc 3 2016-03-08 15:19:02.267263 7f82a6b55700 10 mon.a@0(leader).log v1 preprocess_query log(1 entries from seq 1 at 2016-03-08 15:19:02.102501) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.267269 7f82a6b55700 10 mon.a@0(leader).log v1 preprocess_log log(1 entries from seq 1 at 2016-03-08 15:19:02.102501) v1 from mon.0 2016-03-08 15:19:02.267274 7f82a6b55700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.267275 7f82a6b55700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.267276 7f82a6b55700 20 allow all 2016-03-08 15:19:02.267279 7f82a6b55700 10 mon.a@0(leader).log v1 prepare_update log(1 entries from seq 1 at 2016-03-08 15:19:02.102501) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.267286 7f82a6b55700 10 mon.a@0(leader).log v1 prepare_log log(1 entries from seq 1 at 2016-03-08 15:19:02.102501) v1 from mon.0 2016-03-08 15:19:02.267289 7f82a6b55700 10 mon.a@0(leader).log v1 logging 2016-03-08 15:19:02.102501 mon.0 127.0.0.1:7104/0 1 : cluster [INF] mon.a@0 won leader election with quorum 0 2016-03-08 15:19:02.267297 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..1) propose_pending 2016-03-08 15:19:02.267300 7f82a6b55700 10 mon.a@0(leader).log v1 encode_full log v 1 2016-03-08 15:19:02.267305 7f82a6b55700 10 mon.a@0(leader).log v1 encode_pending v2 2016-03-08 15:19:02.267314 7f82a6b55700 5 mon.a@0(leader).paxos(paxos writing c 1..3) queue_pending_finisher 0x559cd1494200 2016-03-08 15:19:02.267316 7f82a6b55700 10 mon.a@0(leader).paxos(paxos writing c 1..3) trigger_propose not active, will propose later 2016-03-08 15:19:02.267323 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..1) dispatch 0x559cd1561900 log(1 entries from seq 2 at 2016-03-08 15:19:02.173744) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.267327 7f82a6b55700 5 mon.a@0(leader).paxos(paxos writing c 1..3) is_readable = 1 - now=2016-03-08 15:19:02.267327 lease_expire=0.000000 has v0 lc 3 2016-03-08 15:19:02.267331 7f82a6b55700 10 mon.a@0(leader).log v1 preprocess_query log(1 entries from seq 2 at 2016-03-08 15:19:02.173744) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.267337 7f82a6b55700 10 mon.a@0(leader).log v1 preprocess_log log(1 entries from seq 2 at 2016-03-08 15:19:02.173744) v1 from mon.0 2016-03-08 15:19:02.267339 7f82a6b55700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.267340 7f82a6b55700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.267340 7f82a6b55700 20 allow all 2016-03-08 15:19:02.267341 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..1) waiting for paxos -> writeable 2016-03-08 15:19:02.267355 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..1) dispatch 0x559cd1561b80 log(1 entries from seq 3 at 2016-03-08 15:19:02.179877) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.267362 7f82a6b55700 5 mon.a@0(leader).paxos(paxos writing c 1..3) is_readable = 1 - now=2016-03-08 15:19:02.267363 lease_expire=0.000000 has v0 lc 3 2016-03-08 15:19:02.267371 7f82a6b55700 10 mon.a@0(leader).log v1 preprocess_query log(1 entries from seq 3 at 2016-03-08 15:19:02.179877) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.267376 7f82a6b55700 10 
mon.a@0(leader).log v1 preprocess_log log(1 entries from seq 3 at 2016-03-08 15:19:02.179877) v1 from mon.0 2016-03-08 15:19:02.267378 7f82a6b55700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.267379 7f82a6b55700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.267379 7f82a6b55700 20 allow all 2016-03-08 15:19:02.267380 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..1) waiting for paxos -> writeable 2016-03-08 15:19:02.267396 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..1) dispatch 0x559cd1561e00 log(1 entries from seq 4 at 2016-03-08 15:19:02.234891) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.267399 7f82a6b55700 5 mon.a@0(leader).paxos(paxos writing c 1..3) is_readable = 1 - now=2016-03-08 15:19:02.267399 lease_expire=0.000000 has v0 lc 3 2016-03-08 15:19:02.267403 7f82a6b55700 10 mon.a@0(leader).log v1 preprocess_query log(1 entries from seq 4 at 2016-03-08 15:19:02.234891) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.267407 7f82a6b55700 10 mon.a@0(leader).log v1 preprocess_log log(1 entries from seq 4 at 2016-03-08 15:19:02.234891) v1 from mon.0 2016-03-08 15:19:02.267409 7f82a6b55700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.267410 7f82a6b55700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.267411 7f82a6b55700 20 allow all 2016-03-08 15:19:02.267411 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..1) waiting for paxos -> writeable 2016-03-08 15:19:02.267418 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..1) _active 2016-03-08 15:19:02.267419 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..1) remove_legacy_versions 2016-03-08 15:19:02.267424 7f82a6b55700 7 mon.a@0(leader).paxosservice(auth 1..1) _active creating new pending 2016-03-08 15:19:02.267427 7f82a6b55700 10 mon.a@0(leader).auth v1 create_pending v 2 2016-03-08 15:19:02.267429 7f82a6b55700 20 mon.a@0(leader).auth v1 upgrade_format format 1 is current 2016-03-08 15:19:02.267431 7f82a6b55700 10 mon.a@0(leader).auth v1 AuthMonitor::on_active() 2016-03-08 15:19:02.267434 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..3) finish_round waiting_for_readable 2016-03-08 15:19:02.267435 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..3) finish_round waiting_for_writeable 2016-03-08 15:19:02.267435 7f82a6b55700 10 mon.a@0(leader).paxos(paxos writing c 1..3) finish_round done w/ waiters, state 4 2016-03-08 15:19:02.267485 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.267491 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.267501 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..1) dispatch 0x559cd1562080 log(1 entries from seq 5 at 2016-03-08 15:19:02.255237) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.267507 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 1..3) is_readable = 1 - now=2016-03-08 15:19:02.267508 lease_expire=0.000000 has v0 lc 3 2016-03-08 15:19:02.267519 7f82a5352700 10 mon.a@0(leader).log v1 preprocess_query log(1 entries from seq 5 at 2016-03-08 15:19:02.255237) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.267533 7f82a5352700 10 mon.a@0(leader).log v1 preprocess_log log(1 entries from seq 5 at 2016-03-08 15:19:02.255237) v1 from mon.0 2016-03-08 15:19:02.267538 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.267539 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 
15:19:02.267540 7f82a5352700 20 allow all 2016-03-08 15:19:02.267542 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..1) waiting for paxos -> writeable 2016-03-08 15:19:02.267554 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1562080 2016-03-08 15:19:02.267561 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 6 at 2016-03-08 15:19:02.267183) v1 ==== 0+0+0 (0 0 0) 0x559cd1562300 con 0x559cd1477080 2016-03-08 15:19:02.267593 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.267597 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.267606 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..1) dispatch 0x559cd1562300 log(1 entries from seq 6 at 2016-03-08 15:19:02.267183) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.267612 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 1..3) is_readable = 1 - now=2016-03-08 15:19:02.267612 lease_expire=0.000000 has v0 lc 3 2016-03-08 15:19:02.267621 7f82a5352700 10 mon.a@0(leader).log v1 preprocess_query log(1 entries from seq 6 at 2016-03-08 15:19:02.267183) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.267630 7f82a5352700 10 mon.a@0(leader).log v1 preprocess_log log(1 entries from seq 6 at 2016-03-08 15:19:02.267183) v1 from mon.0 2016-03-08 15:19:02.267634 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.267635 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.267636 7f82a5352700 20 allow all 2016-03-08 15:19:02.267636 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..1) waiting for paxos -> writeable 2016-03-08 15:19:02.267649 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1562300 2016-03-08 15:19:02.285490 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..3) commit_finish 4 2016-03-08 15:19:02.285544 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:02.285562 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..2) refresh 2016-03-08 15:19:02.285564 7f82a6b55700 10 mon.a@0(leader).pg v1 update_from_paxos read_incremental 2016-03-08 15:19:02.285597 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.0 got 0 len 533 2016-03-08 15:19:02.285611 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.1 got 0 len 533 2016-03-08 15:19:02.285616 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.2 got 0 len 533 2016-03-08 15:19:02.285626 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.3 got 0 len 533 2016-03-08 15:19:02.285632 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.4 got 0 len 533 2016-03-08 15:19:02.285635 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.5 got 0 len 533 2016-03-08 15:19:02.285638 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.6 got 0 len 533 2016-03-08 15:19:02.285640 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.7 got 0 len 533 2016-03-08 15:19:02.285643 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.8 got 0 len 533 2016-03-08 15:19:02.285646 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.9 got 0 len 533 2016-03-08 15:19:02.285653 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.a got 0 len 533 2016-03-08 15:19:02.285658 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.b got 0 len 533 2016-03-08 15:19:02.285660 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.c got 0 len 533 2016-03-08 15:19:02.285663 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.d got 0 len 
533 2016-03-08 15:19:02.285665 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.e got 0 len 533 2016-03-08 15:19:02.285668 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.f got 0 len 533 2016-03-08 15:19:02.285671 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.10 got 0 len 533 2016-03-08 15:19:02.285673 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.11 got 0 len 533 2016-03-08 15:19:02.285676 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.12 got 0 len 533 2016-03-08 15:19:02.285679 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.13 got 0 len 533 2016-03-08 15:19:02.285683 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.14 got 0 len 533 2016-03-08 15:19:02.285685 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.15 got 0 len 533 2016-03-08 15:19:02.285689 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.16 got 0 len 533 2016-03-08 15:19:02.285697 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.17 got 0 len 533 2016-03-08 15:19:02.285701 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.18 got 0 len 533 2016-03-08 15:19:02.285704 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.19 got 0 len 533 2016-03-08 15:19:02.285706 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.1a got 0 len 533 2016-03-08 15:19:02.285709 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.1b got 0 len 533 2016-03-08 15:19:02.285712 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.1c got 0 len 533 2016-03-08 15:19:02.285714 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.1d got 0 len 533 2016-03-08 15:19:02.285717 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.1e got 0 len 533 2016-03-08 15:19:02.285719 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.1f got 0 len 533 2016-03-08 15:19:02.285722 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.20 got 0 len 533 2016-03-08 15:19:02.285724 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.21 got 0 len 533 2016-03-08 15:19:02.285727 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.22 got 0 len 533 2016-03-08 15:19:02.285735 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.23 got 0 len 533 2016-03-08 15:19:02.285739 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.24 got 0 len 533 2016-03-08 15:19:02.285741 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.25 got 0 len 533 2016-03-08 15:19:02.285744 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.26 got 0 len 533 2016-03-08 15:19:02.285748 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.27 got 0 len 533 2016-03-08 15:19:02.285751 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.28 got 0 len 533 2016-03-08 15:19:02.285754 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.29 got 0 len 533 2016-03-08 15:19:02.285757 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.2a got 0 len 533 2016-03-08 15:19:02.285760 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.2b got 0 len 533 2016-03-08 15:19:02.285762 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.2c got 0 len 533 2016-03-08 15:19:02.285766 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.2d got 0 len 533 2016-03-08 15:19:02.285769 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.2e got 0 len 533 2016-03-08 15:19:02.285773 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.2f got 0 len 533 2016-03-08 15:19:02.285777 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.30 got 0 len 533 2016-03-08 15:19:02.285780 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.31 got 0 len 533 2016-03-08 
15:19:02.285782 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.32 got 0 len 533 2016-03-08 15:19:02.285786 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.33 got 0 len 533 2016-03-08 15:19:02.285788 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.34 got 0 len 533 2016-03-08 15:19:02.285792 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.35 got 0 len 533 2016-03-08 15:19:02.285795 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.36 got 0 len 533 2016-03-08 15:19:02.285799 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.37 got 0 len 533 2016-03-08 15:19:02.285801 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.38 got 0 len 533 2016-03-08 15:19:02.285805 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.39 got 0 len 533 2016-03-08 15:19:02.285808 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.3a got 0 len 533 2016-03-08 15:19:02.285811 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.3b got 0 len 533 2016-03-08 15:19:02.285818 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.3c got 0 len 533 2016-03-08 15:19:02.285821 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.3d got 0 len 533 2016-03-08 15:19:02.285824 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.3e got 0 len 533 2016-03-08 15:19:02.285827 7f82a6b55700 20 mon.a@0(leader).pg v1 refreshing pg 0.3f got 0 len 533 2016-03-08 15:19:02.285833 7f82a6b55700 10 mon.a@0(leader).pg v2 read_pgmap_meta 2016-03-08 15:19:02.285846 7f82a6b55700 10 mon.a@0(leader).pg v2 update_logger 2016-03-08 15:19:02.285854 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:02.285861 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..1) refresh 2016-03-08 15:19:02.285867 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..1) refresh 2016-03-08 15:19:02.285869 7f82a6b55700 10 mon.a@0(leader).log v1 update_from_paxos 2016-03-08 15:19:02.285871 7f82a6b55700 10 mon.a@0(leader).log v1 update_from_paxos version 1 summary v 1 2016-03-08 15:19:02.285877 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:02.285884 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..1) refresh 2016-03-08 15:19:02.285885 7f82a6b55700 10 mon.a@0(leader).auth v1 update_from_paxos 2016-03-08 15:19:02.285887 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..2) post_refresh 2016-03-08 15:19:02.285888 7f82a6b55700 10 mon.a@0(leader).pg v2 post_paxos_update 2016-03-08 15:19:02.285889 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:02.285889 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..1) post_refresh 2016-03-08 15:19:02.285890 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..1) post_refresh 2016-03-08 15:19:02.285890 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:02.285891 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..1) post_refresh 2016-03-08 15:19:02.285892 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..4) commit_proposal 2016-03-08 15:19:02.285894 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..2) _active - not active 2016-03-08 15:19:02.285895 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..4) finish_round 2016-03-08 15:19:02.285896 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..4) finish_round waiting_for_acting 2016-03-08 15:19:02.285897 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..2) _active 2016-03-08 15:19:02.285898 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..2) remove_legacy_versions 
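[editor's note] The block above is one complete paxos round for the pgmap: propose_pending 4 (37898 bytes) goes through begin and commit_start, the incoming log submissions can only queue behind it ("waiting for paxos -> writeable"), and once commit_finish lands, refresh_from_paxos re-reads every PaxosService, with the pgmap incremental refreshing all 64 creating PGs before finish_round hands control back. A sketch for pulling just that lifecycle out of a saved copy of the log (mon.a.log is an assumed file name):

    # Reduce the trace to the paxos round lifecycle:
    # propose_pending -> begin -> commit_start -> commit_finish -> finish_round.
    grep -E '\.paxos\(paxos [a-z]+ c [0-9]+\.\.[0-9]+\) (propose_pending|begin for|commit_start|commit_finish|finish_round)' mon.a.log

Reading only those records makes it much easier to see how many store commits the test actually performs and how long each one takes.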
2016-03-08 15:19:02.285901 7f82a6b55700 7 mon.a@0(leader).paxosservice(pgmap 1..2) _active creating new pending 2016-03-08 15:19:02.285919 7f82a6b55700 10 mon.a@0(leader).pg v2 create_pending v 3 2016-03-08 15:19:02.285921 7f82a6b55700 10 mon.a@0(leader).pg v2 check_osd_map already seen 1 >= 1 2016-03-08 15:19:02.285922 7f82a6b55700 10 mon.a@0(leader).pg v2 update_logger 2016-03-08 15:19:02.285930 7f82a6b55700 0 log_channel(cluster) log [INF] : pgmap v2: 64 pgs: 64 creating; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:02.285947 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 7 at 2016-03-08 15:19:02.285933) v1 -- ?+0 0x559cd1562580 con 0x559cd1477080 2016-03-08 15:19:02.285964 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 7 at 2016-03-08 15:19:02.285933) v1 local 2016-03-08 15:19:02.285970 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..4) finish_round waiting_for_readable 2016-03-08 15:19:02.285972 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..4) finish_round waiting_for_writeable 2016-03-08 15:19:02.285973 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..4) finish_round done w/ waiters, state 1 2016-03-08 15:19:02.285978 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..4) propose_pending 5 644 bytes 2016-03-08 15:19:02.285980 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..4) begin for 5 644 bytes 2016-03-08 15:19:02.285999 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 7 at 2016-03-08 15:19:02.285933) v1 ==== 0+0+0 (0 0 0) 0x559cd1562580 con 0x559cd1477080 2016-03-08 15:19:02.292029 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..4) commit_start 5 2016-03-08 15:19:02.292101 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.292110 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.292119 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..1) dispatch 0x559cd1562580 log(1 entries from seq 7 at 2016-03-08 15:19:02.285933) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.292135 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 1..4) is_readable = 1 - now=2016-03-08 15:19:02.292135 lease_expire=0.000000 has v0 lc 4 2016-03-08 15:19:02.292141 7f82a5352700 10 mon.a@0(leader).log v1 preprocess_query log(1 entries from seq 7 at 2016-03-08 15:19:02.285933) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.292147 7f82a5352700 10 mon.a@0(leader).log v1 preprocess_log log(1 entries from seq 7 at 2016-03-08 15:19:02.285933) v1 from mon.0 2016-03-08 15:19:02.292151 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.292152 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.292152 7f82a5352700 20 allow all 2016-03-08 15:19:02.292154 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..1) waiting for paxos -> writeable 2016-03-08 15:19:02.292161 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1562580 2016-03-08 15:19:02.299412 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..4) commit_finish 5 2016-03-08 15:19:02.299462 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:02.299491 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..2) refresh 2016-03-08 15:19:02.299504 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:02.299513 7f82a6b55700 10 
mon.a@0(leader).paxosservice(osdmap 1..1) refresh 2016-03-08 15:19:02.299524 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..2) refresh 2016-03-08 15:19:02.299527 7f82a6b55700 10 mon.a@0(leader).log v2 update_from_paxos 2016-03-08 15:19:02.299529 7f82a6b55700 10 mon.a@0(leader).log v2 update_from_paxos version 2 summary v 1 2016-03-08 15:19:02.299534 7f82a6b55700 10 mon.a@0(leader).log v2 update_from_paxos latest full 1 2016-03-08 15:19:02.299546 7f82a6b55700 7 mon.a@0(leader).log v2 update_from_paxos applying incremental log 2 2016-03-08 15:19:02.102501 mon.0 127.0.0.1:7104/0 1 : cluster [INF] mon.a@0 won leader election with quorum 0 2016-03-08 15:19:02.299563 7f82a6b55700 20 mon.a@0(leader).log v2 update_from_paxos logging for channel 'cluster' to file 'testdir/osd-crush/log' 2016-03-08 15:19:02.299575 7f82a6b55700 15 mon.a@0(leader).log v2 update_from_paxos logging for 1 channels 2016-03-08 15:19:02.299576 7f82a6b55700 15 mon.a@0(leader).log v2 update_from_paxos channel 'cluster' logging 110 bytes 2016-03-08 15:19:02.299594 7f82a6b55700 10 mon.a@0(leader).log v2 check_subs 2016-03-08 15:19:02.299612 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:02.299624 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..1) refresh 2016-03-08 15:19:02.299626 7f82a6b55700 10 mon.a@0(leader).auth v1 update_from_paxos 2016-03-08 15:19:02.299628 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..2) post_refresh 2016-03-08 15:19:02.299629 7f82a6b55700 10 mon.a@0(leader).pg v2 post_paxos_update 2016-03-08 15:19:02.299632 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:02.299633 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..1) post_refresh 2016-03-08 15:19:02.299635 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..2) post_refresh 2016-03-08 15:19:02.299636 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:02.299637 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..1) post_refresh 2016-03-08 15:19:02.299638 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..5) commit_proposal 2016-03-08 15:19:02.299641 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..2) _active - not active 2016-03-08 15:19:02.299643 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..5) finish_round 2016-03-08 15:19:02.299644 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..5) finish_round waiting_for_acting 2016-03-08 15:19:02.299646 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..2) _active 2016-03-08 15:19:02.299647 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..2) remove_legacy_versions 2016-03-08 15:19:02.299653 7f82a6b55700 7 mon.a@0(leader).paxosservice(logm 1..2) _active creating new pending 2016-03-08 15:19:02.299655 7f82a6b55700 10 mon.a@0(leader).log v2 create_pending v 3 2016-03-08 15:19:02.299675 7f82a6b55700 7 mon.a@0(leader).log v2 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.299691 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd149a0a0 0x559cd1555440 log(last 1) v1 2016-03-08 15:19:02.299696 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 1) v1 -- ?+0 0x559cd1555440 con 0x559cd1477080 2016-03-08 15:19:02.299702 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 1) v1 local 2016-03-08 15:19:02.299750 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..2) dispatch 0x559cd1561900 log(1 entries from seq 2 at 2016-03-08 15:19:02.173744) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.299757 
7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..5) is_readable = 1 - now=2016-03-08 15:19:02.299757 lease_expire=0.000000 has v0 lc 5 2016-03-08 15:19:02.299755 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 1) v1 ==== 0+0+0 (0 0 0) 0x559cd1555440 con 0x559cd1477080 2016-03-08 15:19:02.299769 7f82a6b55700 10 mon.a@0(leader).log v2 preprocess_query log(1 entries from seq 2 at 2016-03-08 15:19:02.173744) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.299794 7f82a6b55700 10 mon.a@0(leader).log v2 preprocess_log log(1 entries from seq 2 at 2016-03-08 15:19:02.173744) v1 from mon.0 2016-03-08 15:19:02.299799 7f82a6b55700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.299800 7f82a6b55700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.299801 7f82a6b55700 20 allow all 2016-03-08 15:19:02.299808 7f82a6b55700 10 mon.a@0(leader).log v2 prepare_update log(1 entries from seq 2 at 2016-03-08 15:19:02.173744) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.299823 7f82a6b55700 10 mon.a@0(leader).log v2 prepare_log log(1 entries from seq 2 at 2016-03-08 15:19:02.173744) v1 from mon.0 2016-03-08 15:19:02.299828 7f82a6b55700 10 mon.a@0(leader).log v2 logging 2016-03-08 15:19:02.173744 mon.0 127.0.0.1:7104/0 2 : cluster [INF] mon.a@0 won leader election with quorum 0 2016-03-08 15:19:02.299841 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..2) setting proposal_timer 0x559cd1494200 with delay of 0.0995877 2016-03-08 15:19:02.299894 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..2) dispatch 0x559cd1561b80 log(1 entries from seq 3 at 2016-03-08 15:19:02.179877) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.299899 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..5) is_readable = 1 - now=2016-03-08 15:19:02.299900 lease_expire=0.000000 has v0 lc 5 2016-03-08 15:19:02.299924 7f82a6b55700 10 mon.a@0(leader).log v2 preprocess_query log(1 entries from seq 3 at 2016-03-08 15:19:02.179877) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.299938 7f82a6b55700 10 mon.a@0(leader).log v2 preprocess_log log(1 entries from seq 3 at 2016-03-08 15:19:02.179877) v1 from mon.0 2016-03-08 15:19:02.299942 7f82a6b55700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.299943 7f82a6b55700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.299944 7f82a6b55700 20 allow all 2016-03-08 15:19:02.299962 7f82a6b55700 10 mon.a@0(leader).log v2 prepare_update log(1 entries from seq 3 at 2016-03-08 15:19:02.179877) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.299976 7f82a6b55700 10 mon.a@0(leader).log v2 prepare_log log(1 entries from seq 3 at 2016-03-08 15:19:02.179877) v1 from mon.0 2016-03-08 15:19:02.299980 7f82a6b55700 10 mon.a@0(leader).log v2 logging 2016-03-08 15:19:02.179877 mon.0 127.0.0.1:7104/0 3 : cluster [INF] monmap e1: 1 mons at {a=127.0.0.1:7104/0} 2016-03-08 15:19:02.299992 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..2) proposal_timer already set 2016-03-08 15:19:02.300004 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..2) dispatch 0x559cd1561e00 log(1 entries from seq 4 at 2016-03-08 15:19:02.234891) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.300009 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..5) is_readable = 1 - now=2016-03-08 15:19:02.300009 lease_expire=0.000000 has v0 lc 5 2016-03-08 15:19:02.300018 7f82a6b55700 10 mon.a@0(leader).log v2 preprocess_query log(1 entries from seq 4 at 2016-03-08 
15:19:02.234891) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.300028 7f82a6b55700 10 mon.a@0(leader).log v2 preprocess_log log(1 entries from seq 4 at 2016-03-08 15:19:02.234891) v1 from mon.0 2016-03-08 15:19:02.300038 7f82a6b55700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.300040 7f82a6b55700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.300041 7f82a6b55700 20 allow all 2016-03-08 15:19:02.300048 7f82a6b55700 10 mon.a@0(leader).log v2 prepare_update log(1 entries from seq 4 at 2016-03-08 15:19:02.234891) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.300063 7f82a6b55700 10 mon.a@0(leader).log v2 prepare_log log(1 entries from seq 4 at 2016-03-08 15:19:02.234891) v1 from mon.0 2016-03-08 15:19:02.300067 7f82a6b55700 10 mon.a@0(leader).log v2 logging 2016-03-08 15:19:02.234891 mon.0 127.0.0.1:7104/0 4 : cluster [INF] pgmap v1: 0 pgs: ; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:02.300079 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..2) proposal_timer already set 2016-03-08 15:19:02.300095 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..2) dispatch 0x559cd1562080 log(1 entries from seq 5 at 2016-03-08 15:19:02.255237) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.300100 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..5) is_readable = 1 - now=2016-03-08 15:19:02.300100 lease_expire=0.000000 has v0 lc 5 2016-03-08 15:19:02.300108 7f82a6b55700 10 mon.a@0(leader).log v2 preprocess_query log(1 entries from seq 5 at 2016-03-08 15:19:02.255237) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.300119 7f82a6b55700 10 mon.a@0(leader).log v2 preprocess_log log(1 entries from seq 5 at 2016-03-08 15:19:02.255237) v1 from mon.0 2016-03-08 15:19:02.300123 7f82a6b55700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.300124 7f82a6b55700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.300125 7f82a6b55700 20 allow all 2016-03-08 15:19:02.300131 7f82a6b55700 10 mon.a@0(leader).log v2 prepare_update log(1 entries from seq 5 at 2016-03-08 15:19:02.255237) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.300141 7f82a6b55700 10 mon.a@0(leader).log v2 prepare_log log(1 entries from seq 5 at 2016-03-08 15:19:02.255237) v1 from mon.0 2016-03-08 15:19:02.300145 7f82a6b55700 10 mon.a@0(leader).log v2 logging 2016-03-08 15:19:02.255237 mon.0 127.0.0.1:7104/0 5 : cluster [INF] mdsmap e1: 0/0/0 up 2016-03-08 15:19:02.300161 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..2) proposal_timer already set 2016-03-08 15:19:02.300174 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..2) dispatch 0x559cd1562300 log(1 entries from seq 6 at 2016-03-08 15:19:02.267183) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.300180 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..5) is_readable = 1 - now=2016-03-08 15:19:02.300180 lease_expire=0.000000 has v0 lc 5 2016-03-08 15:19:02.300188 7f82a6b55700 10 mon.a@0(leader).log v2 preprocess_query log(1 entries from seq 6 at 2016-03-08 15:19:02.267183) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.300202 7f82a6b55700 10 mon.a@0(leader).log v2 preprocess_log log(1 entries from seq 6 at 2016-03-08 15:19:02.267183) v1 from mon.0 2016-03-08 15:19:02.300206 7f82a6b55700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.300208 7f82a6b55700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.300208 7f82a6b55700 20 allow all 2016-03-08 15:19:02.300213 7f82a6b55700 10 
mon.a@0(leader).log v2 prepare_update log(1 entries from seq 6 at 2016-03-08 15:19:02.267183) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.300222 7f82a6b55700 10 mon.a@0(leader).log v2 prepare_log log(1 entries from seq 6 at 2016-03-08 15:19:02.267183) v1 from mon.0 2016-03-08 15:19:02.300226 7f82a6b55700 10 mon.a@0(leader).log v2 logging 2016-03-08 15:19:02.267183 mon.0 127.0.0.1:7104/0 6 : cluster [INF] osdmap e1: 0 osds: 0 up, 0 in 2016-03-08 15:19:02.300236 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..2) proposal_timer already set 2016-03-08 15:19:02.300249 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..2) dispatch 0x559cd1562580 log(1 entries from seq 7 at 2016-03-08 15:19:02.285933) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.300255 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..5) is_readable = 1 - now=2016-03-08 15:19:02.300255 lease_expire=0.000000 has v0 lc 5 2016-03-08 15:19:02.300265 7f82a6b55700 10 mon.a@0(leader).log v2 preprocess_query log(1 entries from seq 7 at 2016-03-08 15:19:02.285933) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.300279 7f82a6b55700 10 mon.a@0(leader).log v2 preprocess_log log(1 entries from seq 7 at 2016-03-08 15:19:02.285933) v1 from mon.0 2016-03-08 15:19:02.300283 7f82a6b55700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.300284 7f82a6b55700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.300286 7f82a6b55700 20 allow all 2016-03-08 15:19:02.300292 7f82a6b55700 10 mon.a@0(leader).log v2 prepare_update log(1 entries from seq 7 at 2016-03-08 15:19:02.285933) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.300303 7f82a6b55700 10 mon.a@0(leader).log v2 prepare_log log(1 entries from seq 7 at 2016-03-08 15:19:02.285933) v1 from mon.0 2016-03-08 15:19:02.300307 7f82a6b55700 10 mon.a@0(leader).log v2 logging 2016-03-08 15:19:02.285933 mon.0 127.0.0.1:7104/0 7 : cluster [INF] pgmap v2: 64 pgs: 64 creating; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:02.300318 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..2) proposal_timer already set 2016-03-08 15:19:02.300321 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..5) finish_round waiting_for_readable 2016-03-08 15:19:02.300323 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..5) finish_round waiting_for_writeable 2016-03-08 15:19:02.300325 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..5) finish_round done w/ waiters, state 1 2016-03-08 15:19:02.300367 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.300372 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.300375 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:02.300376 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.300376 7f82a5352700 20 allow all 2016-03-08 15:19:02.300391 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1555440 2016-03-08 15:19:02.399546 7f82a5b53700 10 mon.a@0(leader).paxosservice(logm 1..2) propose_pending 2016-03-08 15:19:02.399562 7f82a5b53700 10 mon.a@0(leader).log v2 encode_full log v 2 2016-03-08 15:19:02.399575 7f82a5b53700 10 mon.a@0(leader).log v2 encode_pending v3 2016-03-08 15:19:02.399582 7f82a5b53700 5 mon.a@0(leader).paxos(paxos active c 1..5) queue_pending_finisher 0x559cd1494410 2016-03-08 15:19:02.399584 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..5) trigger_propose active, proposing now 
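[editor's note] Above, once the first cluster-log entry is committed (logm v2), the remaining entries (seq 2 through 7) are prepared and batched behind a single proposal_timer ("setting proposal_timer ... with delay of 0.0995877", then repeated "proposal_timer already set"); when the timer fires at 15:19:02.399546 they are all encoded into one transaction (log v2 full plus pending v3). The human-readable [INF] lines being batched here are the same content that update_from_paxos later writes to the cluster channel file testdir/osd-crush/log, and they can be recovered straight from the debug trace. A sketch, with mon.a.log again standing in for a saved copy of this output:

    # List each distinct cluster [INF] summary line, in order of first appearance.
    grep -F 'cluster [INF]' mon.a.log | sed 's/.*cluster \[INF\] //' | awk '!seen[$0]++'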
2016-03-08 15:19:02.399588 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..5) propose_pending 6 2012 bytes 2016-03-08 15:19:02.399590 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..5) begin for 6 2012 bytes 2016-03-08 15:19:02.408356 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..5) commit_start 6 2016-03-08 15:19:02.414476 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..5) commit_finish 6 2016-03-08 15:19:02.414507 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:02.414521 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..2) refresh 2016-03-08 15:19:02.414529 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:02.414535 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..1) refresh 2016-03-08 15:19:02.414543 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..3) refresh 2016-03-08 15:19:02.414544 7f82a6b55700 10 mon.a@0(leader).log v3 update_from_paxos 2016-03-08 15:19:02.414545 7f82a6b55700 10 mon.a@0(leader).log v3 update_from_paxos version 3 summary v 2 2016-03-08 15:19:02.414549 7f82a6b55700 10 mon.a@0(leader).log v3 update_from_paxos latest full 2 2016-03-08 15:19:02.414561 7f82a6b55700 7 mon.a@0(leader).log v3 update_from_paxos applying incremental log 3 2016-03-08 15:19:02.173744 mon.0 127.0.0.1:7104/0 2 : cluster [INF] mon.a@0 won leader election with quorum 0 2016-03-08 15:19:02.414574 7f82a6b55700 20 mon.a@0(leader).log v3 update_from_paxos logging for channel 'cluster' to file 'testdir/osd-crush/log' 2016-03-08 15:19:02.414581 7f82a6b55700 7 mon.a@0(leader).log v3 update_from_paxos applying incremental log 3 2016-03-08 15:19:02.179877 mon.0 127.0.0.1:7104/0 3 : cluster [INF] monmap e1: 1 mons at {a=127.0.0.1:7104/0} 2016-03-08 15:19:02.414588 7f82a6b55700 20 mon.a@0(leader).log v3 update_from_paxos logging for channel 'cluster' to file 'testdir/osd-crush/log' 2016-03-08 15:19:02.414593 7f82a6b55700 7 mon.a@0(leader).log v3 update_from_paxos applying incremental log 3 2016-03-08 15:19:02.234891 mon.0 127.0.0.1:7104/0 4 : cluster [INF] pgmap v1: 0 pgs: ; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:02.414597 7f82a6b55700 20 mon.a@0(leader).log v3 update_from_paxos logging for channel 'cluster' to file 'testdir/osd-crush/log' 2016-03-08 15:19:02.414601 7f82a6b55700 7 mon.a@0(leader).log v3 update_from_paxos applying incremental log 3 2016-03-08 15:19:02.255237 mon.0 127.0.0.1:7104/0 5 : cluster [INF] mdsmap e1: 0/0/0 up 2016-03-08 15:19:02.414605 7f82a6b55700 20 mon.a@0(leader).log v3 update_from_paxos logging for channel 'cluster' to file 'testdir/osd-crush/log' 2016-03-08 15:19:02.414609 7f82a6b55700 7 mon.a@0(leader).log v3 update_from_paxos applying incremental log 3 2016-03-08 15:19:02.267183 mon.0 127.0.0.1:7104/0 6 : cluster [INF] osdmap e1: 0 osds: 0 up, 0 in 2016-03-08 15:19:02.414612 7f82a6b55700 20 mon.a@0(leader).log v3 update_from_paxos logging for channel 'cluster' to file 'testdir/osd-crush/log' 2016-03-08 15:19:02.414616 7f82a6b55700 7 mon.a@0(leader).log v3 update_from_paxos applying incremental log 3 2016-03-08 15:19:02.285933 mon.0 127.0.0.1:7104/0 7 : cluster [INF] pgmap v2: 64 pgs: 64 creating; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:02.414620 7f82a6b55700 20 mon.a@0(leader).log v3 update_from_paxos logging for channel 'cluster' to file 'testdir/osd-crush/log' 2016-03-08 15:19:02.414623 7f82a6b55700 15 mon.a@0(leader).log v3 update_from_paxos logging for 1 channels 2016-03-08 15:19:02.414624 7f82a6b55700 15 mon.a@0(leader).log 
v3 update_from_paxos channel 'cluster' logging 678 bytes 2016-03-08 15:19:02.414637 7f82a6b55700 10 mon.a@0(leader).log v3 check_subs 2016-03-08 15:19:02.414649 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:02.414656 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..1) refresh 2016-03-08 15:19:02.414658 7f82a6b55700 10 mon.a@0(leader).auth v1 update_from_paxos 2016-03-08 15:19:02.414661 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..2) post_refresh 2016-03-08 15:19:02.414662 7f82a6b55700 10 mon.a@0(leader).pg v2 post_paxos_update 2016-03-08 15:19:02.414663 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:02.414664 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..1) post_refresh 2016-03-08 15:19:02.414664 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..3) post_refresh 2016-03-08 15:19:02.414665 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:02.414665 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..1) post_refresh 2016-03-08 15:19:02.414666 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..6) commit_proposal 2016-03-08 15:19:02.414668 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..3) _active - not active 2016-03-08 15:19:02.414669 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..6) finish_round 2016-03-08 15:19:02.414670 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..6) finish_round waiting_for_acting 2016-03-08 15:19:02.414671 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..3) _active 2016-03-08 15:19:02.414672 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..3) remove_legacy_versions 2016-03-08 15:19:02.414675 7f82a6b55700 7 mon.a@0(leader).paxosservice(logm 1..3) _active creating new pending 2016-03-08 15:19:02.414678 7f82a6b55700 10 mon.a@0(leader).log v3 create_pending v 4 2016-03-08 15:19:02.414684 7f82a6b55700 7 mon.a@0(leader).log v3 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.414689 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd149b960 0x559cd1555680 log(last 2) v1 2016-03-08 15:19:02.414692 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 2) v1 -- ?+0 0x559cd1555680 con 0x559cd1477080 2016-03-08 15:19:02.414695 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 2) v1 local 2016-03-08 15:19:02.414716 7f82a6b55700 7 mon.a@0(leader).log v3 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.414721 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd149bde0 0x559cd1555200 log(last 3) v1 2016-03-08 15:19:02.414723 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 3) v1 -- ?+0 0x559cd1555200 con 0x559cd1477080 2016-03-08 15:19:02.414724 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 3) v1 local 2016-03-08 15:19:02.414730 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 2) v1 ==== 0+0+0 (0 0 0) 0x559cd1555680 con 0x559cd1477080 2016-03-08 15:19:02.414739 7f82a6b55700 7 mon.a@0(leader).log v3 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.414743 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd170e120 0x559cd15558c0 log(last 4) v1 2016-03-08 15:19:02.414744 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 4) v1 -- ?+0 0x559cd15558c0 con 0x559cd1477080 2016-03-08 15:19:02.414745 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 4) v1 local 2016-03-08 15:19:02.414760 7f82a6b55700 7 mon.a@0(leader).log v3 _updated_log for mon.0 127.0.0.1:7104/0 
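[editor's note] After proposal 6 commits, logm v3's update_from_paxos applies incremental entries 2 through 7, writes 678 bytes to the 'cluster' channel file testdir/osd-crush/log, and then _updated_log answers each waiting submitter with a log(last N) reply; log(last 2) through log(last 4) appear above and the rest follow below. A sketch for pairing submissions with their acknowledgements in a saved copy of the log (mon.a.log is an assumed name); each "1 entries from seq N" message should eventually be covered by a "log(last M)" with M >= N:

    # Count the submitted log messages and the log(last N) acknowledgements.
    grep -E -o 'log\(1 entries from seq [0-9]+|log\(last [0-9]+\)' mon.a.log | sort | uniq -c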
2016-03-08 15:19:02.414765 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd170e360 0x559cd1555b00 log(last 5) v1 2016-03-08 15:19:02.414766 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 5) v1 -- ?+0 0x559cd1555b00 con 0x559cd1477080 2016-03-08 15:19:02.414767 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 5) v1 local 2016-03-08 15:19:02.414784 7f82a6b55700 7 mon.a@0(leader).log v3 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.414789 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd170f7a0 0x559cd1555d40 log(last 6) v1 2016-03-08 15:19:02.414791 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 6) v1 -- ?+0 0x559cd1555d40 con 0x559cd1477080 2016-03-08 15:19:02.414792 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 6) v1 local 2016-03-08 15:19:02.414803 7f82a6b55700 7 mon.a@0(leader).log v3 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.414810 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd170fd40 0x559cd1555f80 log(last 7) v1 2016-03-08 15:19:02.414811 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 7) v1 -- ?+0 0x559cd1555f80 con 0x559cd1477080 2016-03-08 15:19:02.414812 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 7) v1 local 2016-03-08 15:19:02.414822 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..6) finish_round waiting_for_readable 2016-03-08 15:19:02.414824 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..6) finish_round waiting_for_writeable 2016-03-08 15:19:02.414825 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..6) finish_round done w/ waiters, state 1 2016-03-08 15:19:02.414865 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.414870 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.414876 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:02.414878 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.414880 7f82a5352700 20 allow all 2016-03-08 15:19:02.414896 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1555680 2016-03-08 15:19:02.414901 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 3) v1 ==== 0+0+0 (0 0 0) 0x559cd1555200 con 0x559cd1477080 2016-03-08 15:19:02.414920 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.414924 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.414928 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:02.414929 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.414930 7f82a5352700 20 allow all 2016-03-08 15:19:02.414940 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1555200 2016-03-08 15:19:02.414945 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 4) v1 ==== 0+0+0 (0 0 0) 0x559cd15558c0 con 0x559cd1477080 2016-03-08 15:19:02.414979 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.414982 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.414986 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:02.414987 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.414988 7f82a5352700 20 allow all 2016-03-08 15:19:02.415000 7f82a5352700 20 -- 127.0.0.1:7104/0 
done calling dispatch on 0x559cd15558c0 2016-03-08 15:19:02.415003 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 5) v1 ==== 0+0+0 (0 0 0) 0x559cd1555b00 con 0x559cd1477080 2016-03-08 15:19:02.415022 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.415026 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.415030 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:02.415031 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.415031 7f82a5352700 20 allow all 2016-03-08 15:19:02.415051 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1555b00 2016-03-08 15:19:02.415054 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 6) v1 ==== 0+0+0 (0 0 0) 0x559cd1555d40 con 0x559cd1477080 2016-03-08 15:19:02.415079 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.415082 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.415086 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:02.415087 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.415088 7f82a5352700 20 allow all 2016-03-08 15:19:02.415099 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1555d40 2016-03-08 15:19:02.415103 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 7) v1 ==== 0+0+0 (0 0 0) 0x559cd1555f80 con 0x559cd1477080 2016-03-08 15:19:02.415120 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.415123 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.415126 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:02.415127 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.415128 7f82a5352700 20 allow all 2016-03-08 15:19:02.415138 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1555f80 2016-03-08 15:19:02.473123 7f82a4350700 20 accepter.accepter poll got 1 2016-03-08 15:19:02.473138 7f82a4350700 10 accepter.pfd.revents=1 2016-03-08 15:19:02.473144 7f82a4350700 10 accepter.accepted incoming on sd 21 2016-03-08 15:19:02.473195 7f82a4350700 20 accepter.accepter calling poll 2016-03-08 15:19:02.473221 7f82aea26700 10 -- 127.0.0.1:7104/0 >> :/0 pipe(0x559cd17aa000 sd=21 :0 s=0 pgs=0 cs=0 l=0 c=0x559cd1478400).accept 2016-03-08 15:19:02.473279 7f82aea26700 1 -- 127.0.0.1:7104/0 >> :/0 pipe(0x559cd17aa000 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd1478400).accept sd=21 127.0.0.1:52218/0 2016-03-08 15:19:02.473349 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd1478400).accept peer addr is 127.0.0.1:0/1829523028 2016-03-08 15:19:02.473361 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd1478400).accept got peer connect_seq 0 global_seq 1 2016-03-08 15:19:02.473365 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1478400).accept of host_type 8, policy.lossy=1 policy.server=1 policy.standby=0 policy.resetcheck=0 2016-03-08 15:19:02.473368 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=0 pgs=0 cs=0 l=1 
c=0x559cd1478400).accept my proto 15, their proto 15 2016-03-08 15:19:02.473372 7f82aea26700 10 mon.a@0(leader) e1 ms_verify_authorizer 127.0.0.1:0/1829523028 client protocol 0 2016-03-08 15:19:02.473375 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1478400).accept: setting up session_security. 2016-03-08 15:19:02.473377 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1478400).accept new session 2016-03-08 15:19:02.473379 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).accept success, connect_seq = 1, sending READY 2016-03-08 15:19:02.473382 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).accept features 576460752303423487 2016-03-08 15:19:02.473389 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).register_pipe 2016-03-08 15:19:02.473409 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).discard_requeued_up_to 0 2016-03-08 15:19:02.473414 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).accept starting writer, state open 2016-03-08 15:19:02.473445 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).accept done 2016-03-08 15:19:02.473453 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader reading tag... 2016-03-08 15:19:02.473455 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.473465 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer sleeping 2016-03-08 15:19:02.473491 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got KEEPALIVE2 2016-03-08 15:19:02.473437 2016-03-08 15:19:02.473507 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader reading tag... 2016-03-08 15:19:02.473507 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.473510 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).write_keepalive2 15 2016-03-08 15:19:02.473437 2016-03-08 15:19:02.473526 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got MSG 2016-03-08 15:19:02.473530 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got envelope type=17 src client.? 
front=60 data=0 off 0 2016-03-08 15:19:02.473534 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader wants 60 bytes from policy throttler 0/104857600 2016-03-08 15:19:02.473538 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader wants 60 from dispatch throttler 0/104857600 2016-03-08 15:19:02.473543 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got front 60 2016-03-08 15:19:02.473542 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.473545 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).aborted = 0 2016-03-08 15:19:02.473547 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got 60 + 0 + 0 byte message 2016-03-08 15:19:02.473547 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer sleeping 2016-03-08 15:19:02.473556 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got message 1 0x559cd17cb400 auth(proto 0 30 bytes epoch 0) v1 2016-03-08 15:19:02.473560 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd17cb400 prio 127 2016-03-08 15:19:02.473566 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader reading tag... 2016-03-08 15:19:02.473567 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.473573 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).write_ack 1 2016-03-08 15:19:02.473571 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.? 127.0.0.1:0/1829523028 1 ==== auth(proto 0 30 bytes epoch 0) v1 ==== 60+0+0 (900162395 0 0) 0x559cd17cb400 con 0x559cd1478400 2016-03-08 15:19:02.473590 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.473598 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer sleeping 2016-03-08 15:19:02.473619 7f82a5352700 10 mon.a@0(leader) e1 _ms_dispatch new session 0x559cd14d7200 MonSession(client.? 127.0.0.1:0/1829523028 is open) 2016-03-08 15:19:02.473623 7f82a5352700 20 mon.a@0(leader) e1 caps 2016-03-08 15:19:02.473629 7f82a5352700 10 mon.a@0(leader).paxosservice(auth 1..1) dispatch 0x559cd17cb400 auth(proto 0 30 bytes epoch 0) v1 from client.? 127.0.0.1:0/1829523028 con 0x559cd1478400 2016-03-08 15:19:02.473633 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..6) is_readable = 1 - now=2016-03-08 15:19:02.473633 lease_expire=0.000000 has v0 lc 6 2016-03-08 15:19:02.473636 7f82a5352700 10 mon.a@0(leader).auth v1 preprocess_query auth(proto 0 30 bytes epoch 0) v1 from client.? 
127.0.0.1:0/1829523028 2016-03-08 15:19:02.473639 7f82a5352700 10 mon.a@0(leader).auth v1 prep_auth() blob_size=30 2016-03-08 15:19:02.473645 7f82a5352700 10 mon.a@0(leader).auth v1 AuthMonitor::assign_global_id m=auth(proto 0 30 bytes epoch 0) v1 mon=0/1 last_allocated=4096 max_global_id=4096 2016-03-08 15:19:02.473646 7f82a5352700 10 mon.a@0(leader).auth v1 next_global_id should be 4097 2016-03-08 15:19:02.473648 7f82a5352700 10 mon.a@0(leader).auth v1 prepare_update auth(proto 0 30 bytes epoch 0) v1 from client.? 127.0.0.1:0/1829523028 2016-03-08 15:19:02.473650 7f82a5352700 10 mon.a@0(leader).auth v1 prep_auth() blob_size=30 2016-03-08 15:19:02.473651 7f82a5352700 10 mon.a@0(leader).auth v1 AuthMonitor::assign_global_id m=auth(proto 0 30 bytes epoch 0) v1 mon=0/1 last_allocated=4096 max_global_id=4096 2016-03-08 15:19:02.473652 7f82a5352700 10 mon.a@0(leader).auth v1 next_global_id should be 4097 2016-03-08 15:19:02.473654 7f82a5352700 10 mon.a@0(leader).auth v1 increasing max_global_id to 14096 2016-03-08 15:19:02.473655 7f82a5352700 10 mon.a@0(leader).auth v1 increasing global id, waitlisting message 2016-03-08 15:19:02.473661 7f82a5352700 10 mon.a@0(leader).paxosservice(auth 1..1) propose_pending 2016-03-08 15:19:02.473702 7f82a5352700 10 mon.a@0(leader).auth v1 encode_full auth v 1 2016-03-08 15:19:02.473705 7f82a5352700 20 mon.a@0(leader).auth v1 encode_full key server has no secrets! 2016-03-08 15:19:02.473719 7f82a5352700 10 mon.a@0(leader).auth v1 encode_pending v 2 2016-03-08 15:19:02.473728 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..6) queue_pending_finisher 0x559cd14943b0 2016-03-08 15:19:02.473730 7f82a5352700 10 mon.a@0(leader).paxos(paxos active c 1..6) trigger_propose active, proposing now 2016-03-08 15:19:02.473735 7f82a5352700 10 mon.a@0(leader).paxos(paxos active c 1..6) propose_pending 7 892 bytes 2016-03-08 15:19:02.473737 7f82a5352700 10 mon.a@0(leader).paxos(paxos updating c 1..6) begin for 7 892 bytes 2016-03-08 15:19:02.481128 7f82a5352700 10 mon.a@0(leader).paxos(paxos updating c 1..6) commit_start 7 2016-03-08 15:19:02.481153 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 60 to dispatch throttler 60/104857600 2016-03-08 15:19:02.481160 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd17cb400 2016-03-08 15:19:02.488227 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..6) commit_finish 7 2016-03-08 15:19:02.488284 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:02.488301 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..2) refresh 2016-03-08 15:19:02.488321 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:02.488328 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..1) refresh 2016-03-08 15:19:02.488335 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..3) refresh 2016-03-08 15:19:02.488337 7f82a6b55700 10 mon.a@0(leader).log v3 update_from_paxos 2016-03-08 15:19:02.488338 7f82a6b55700 10 mon.a@0(leader).log v3 update_from_paxos version 3 summary v 3 2016-03-08 15:19:02.488345 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:02.488352 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) refresh 2016-03-08 15:19:02.488354 7f82a6b55700 10 mon.a@0(leader).auth v2 update_from_paxos 2016-03-08 15:19:02.488359 7f82a6b55700 10 mon.a@0(leader).auth v2 update_from_paxos version 2 keys ver 1 latest 1 2016-03-08 15:19:02.488374 7f82a6b55700 10 mon.a@0(leader).auth v2 update_from_paxos key server version 1 2016-03-08 
15:19:02.488386 7f82a6b55700 20 mon.a@0(leader).auth v2 update_from_paxos walking through version 2 len 19 2016-03-08 15:19:02.488388 7f82a6b55700 10 mon.a@0(leader).auth v2 update_from_paxos() last_allocated_id=4096 max_global_id=14096 format_version 1 2016-03-08 15:19:02.488389 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..2) post_refresh 2016-03-08 15:19:02.488390 7f82a6b55700 10 mon.a@0(leader).pg v2 post_paxos_update 2016-03-08 15:19:02.488392 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:02.488392 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..1) post_refresh 2016-03-08 15:19:02.488393 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..3) post_refresh 2016-03-08 15:19:02.488394 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:02.488399 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) post_refresh 2016-03-08 15:19:02.488401 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..7) commit_proposal 2016-03-08 15:19:02.488403 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) _active - not active 2016-03-08 15:19:02.488405 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..7) finish_round 2016-03-08 15:19:02.488405 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..7) finish_round waiting_for_acting 2016-03-08 15:19:02.488427 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) dispatch 0x559cd17cb400 auth(proto 0 30 bytes epoch 0) v1 from client.? 127.0.0.1:0/1829523028 con 0x559cd1478400 2016-03-08 15:19:02.488435 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..7) is_readable = 1 - now=2016-03-08 15:19:02.488435 lease_expire=0.000000 has v0 lc 7 2016-03-08 15:19:02.488439 7f82a6b55700 10 mon.a@0(leader).auth v2 preprocess_query auth(proto 0 30 bytes epoch 0) v1 from client.? 127.0.0.1:0/1829523028 2016-03-08 15:19:02.488441 7f82a6b55700 10 mon.a@0(leader).auth v2 prep_auth() blob_size=30 2016-03-08 15:19:02.488446 7f82a6b55700 10 mon.a@0(leader).auth v2 AuthMonitor::assign_global_id m=auth(proto 0 30 bytes epoch 0) v1 mon=0/1 last_allocated=4096 max_global_id=14096 2016-03-08 15:19:02.488448 7f82a6b55700 10 mon.a@0(leader).auth v2 next_global_id should be 4097 2016-03-08 15:19:02.488452 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/1829523028 -- mon_map magic: 0 v1 -- ?+0 0x559cd15561c0 con 0x559cd1478400 2016-03-08 15:19:02.488457 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message mon_map magic: 0 v1 remote, 127.0.0.1:0/1829523028, have pipe. 2016-03-08 15:19:02.488469 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd17100a0 0x559cd1562080 auth_reply(proto 1 0 (0) Success) v1 2016-03-08 15:19:02.488477 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/1829523028 -- auth_reply(proto 1 0 (0) Success) v1 -- ?+0 0x559cd1562080 con 0x559cd1478400 2016-03-08 15:19:02.488480 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message auth_reply(proto 1 0 (0) Success) v1 remote, 127.0.0.1:0/1829523028, have pipe. 
2016-03-08 15:19:02.488488 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.488498 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) _active 2016-03-08 15:19:02.488500 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) remove_legacy_versions 2016-03-08 15:19:02.488501 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer encoding 1 features 576460752303423487 0x559cd15561c0 mon_map magic: 0 v1 2016-03-08 15:19:02.488509 7f82a6b55700 7 mon.a@0(leader).paxosservice(auth 1..2) _active creating new pending 2016-03-08 15:19:02.488511 7f82a6b55700 10 mon.a@0(leader).auth v2 create_pending v 3 2016-03-08 15:19:02.488513 7f82a6b55700 20 mon.a@0(leader).auth v2 upgrade_format format 1 is current 2016-03-08 15:19:02.488514 7f82a6b55700 10 mon.a@0(leader).auth v2 AuthMonitor::on_active() 2016-03-08 15:19:02.488512 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer signed seq # 1): sig = 0 2016-03-08 15:19:02.488518 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..7) finish_round waiting_for_readable 2016-03-08 15:19:02.488519 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..7) finish_round waiting_for_writeable 2016-03-08 15:19:02.488520 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..7) finish_round done w/ waiters, state 1 2016-03-08 15:19:02.488519 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer sending 1 0x559cd15561c0 2016-03-08 15:19:02.488570 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.488577 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer encoding 2 features 576460752303423487 0x559cd1562080 auth_reply(proto 1 0 (0) Success) v1 2016-03-08 15:19:02.488597 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer signed seq # 2): sig = 0 2016-03-08 15:19:02.488603 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer sending 2 0x559cd1562080 2016-03-08 15:19:02.488620 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.488626 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer sleeping 2016-03-08 15:19:02.488820 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got ACK 2016-03-08 15:19:02.488834 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got ack seq 2 2016-03-08 15:19:02.488837 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader reading tag... 
2016-03-08 15:19:02.488840 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got MSG 2016-03-08 15:19:02.488843 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got envelope type=15 src client.? front=23 data=0 off 0 2016-03-08 15:19:02.488847 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader wants 23 bytes from policy throttler 0/104857600 2016-03-08 15:19:02.488851 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader wants 23 from dispatch throttler 0/104857600 2016-03-08 15:19:02.488856 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got front 23 2016-03-08 15:19:02.488859 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).aborted = 0 2016-03-08 15:19:02.488862 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got 23 + 0 + 0 byte message 2016-03-08 15:19:02.488876 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got message 2 0x559cd14d7600 mon_subscribe({monmap=0+}) v2 2016-03-08 15:19:02.488883 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd14d7600 prio 127 2016-03-08 15:19:02.488890 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader reading tag... 2016-03-08 15:19:02.488894 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.488897 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.? 
127.0.0.1:0/1829523028 2 ==== mon_subscribe({monmap=0+}) v2 ==== 23+0+0 (1620593354 0 0) 0x559cd14d7600 con 0x559cd1478400 2016-03-08 15:19:02.488917 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got MSG 2016-03-08 15:19:02.488921 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).write_ack 2 2016-03-08 15:19:02.488923 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got envelope type=15 src client.4097 front=23 data=0 off 0 2016-03-08 15:19:02.488928 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader wants 23 bytes from policy throttler 23/104857600 2016-03-08 15:19:02.488931 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.488932 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader wants 23 from dispatch throttler 23/104857600 2016-03-08 15:19:02.488935 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer sleeping 2016-03-08 15:19:02.488938 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got front 23 2016-03-08 15:19:02.488942 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).aborted = 0 2016-03-08 15:19:02.488946 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got 23 + 0 + 0 byte message 2016-03-08 15:19:02.488965 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d7200 for client.? 127.0.0.1:0/1829523028 2016-03-08 15:19:02.488970 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.488978 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:02.488980 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.488981 7f82a5352700 20 allow all 2016-03-08 15:19:02.488979 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got message 3 0x559cd14d7800 mon_subscribe({osdmap=0}) v2 2016-03-08 15:19:02.488983 7f82a5352700 10 mon.a@0(leader) e1 handle_subscribe mon_subscribe({monmap=0+}) v2 2016-03-08 15:19:02.488985 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd14d7800 prio 127 2016-03-08 15:19:02.488990 7f82a5352700 10 mon.a@0(leader) e1 check_sub monmap next 0 have 1 2016-03-08 15:19:02.488990 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader reading tag... 
2016-03-08 15:19:02.488995 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/1829523028 -- mon_map magic: 0 v1 -- ?+0 0x559cd1555f80 con 0x559cd1478400 2016-03-08 15:19:02.488998 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.489005 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).write_ack 3 2016-03-08 15:19:02.489007 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_map magic: 0 v1 remote, 127.0.0.1:0/1829523028, have pipe. 2016-03-08 15:19:02.489017 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer encoding 3 features 576460752303423487 0x559cd1555f80 mon_map magic: 0 v1 2016-03-08 15:19:02.489020 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 23 to dispatch throttler 46/104857600 2016-03-08 15:19:02.489022 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd14d7600 2016-03-08 15:19:02.489024 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer signed seq # 3): sig = 0 2016-03-08 15:19:02.489027 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer sending 3 0x559cd1555f80 2016-03-08 15:19:02.489026 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4097 127.0.0.1:0/1829523028 3 ==== mon_subscribe({osdmap=0}) v2 ==== 23+0+0 (319581110 0 0) 0x559cd14d7800 con 0x559cd1478400 2016-03-08 15:19:02.489040 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d7200 for client.? 127.0.0.1:0/1829523028 2016-03-08 15:19:02.489043 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.489051 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:02.489047 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.489052 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.489052 7f82a5352700 20 allow all 2016-03-08 15:19:02.489053 7f82a5352700 10 mon.a@0(leader) e1 handle_subscribe mon_subscribe({osdmap=0}) v2 2016-03-08 15:19:02.489053 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer sleeping 2016-03-08 15:19:02.489056 7f82a5352700 20 is_capable service=osd command= read on cap allow * 2016-03-08 15:19:02.489056 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.489057 7f82a5352700 20 allow all 2016-03-08 15:19:02.489058 7f82a5352700 10 mon.a@0(leader).osd e1 check_sub 0x559cd16f8760 next 0 (onetime) 2016-03-08 15:19:02.489062 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/1829523028 -- osd_map(1..1 src has 1..1) v3 -- ?+0 0x559cd1562300 con 0x559cd1478400 2016-03-08 15:19:02.489065 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message osd_map(1..1 src has 1..1) v3 remote, 127.0.0.1:0/1829523028, have pipe. 
2016-03-08 15:19:02.489077 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 23 to dispatch throttler 23/104857600 2016-03-08 15:19:02.489081 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd14d7800 2016-03-08 15:19:02.489083 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.489088 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer encoding 4 features 576460752303423487 0x559cd1562300 osd_map(1..1 src has 1..1) v3 2016-03-08 15:19:02.489095 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer signed seq # 4): sig = 0 2016-03-08 15:19:02.489098 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer sending 4 0x559cd1562300 2016-03-08 15:19:02.489123 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.489129 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer sleeping 2016-03-08 15:19:02.504203 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got ACK 2016-03-08 15:19:02.504222 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got ack seq 3 2016-03-08 15:19:02.504225 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader reading tag... 2016-03-08 15:19:02.504228 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got ACK 2016-03-08 15:19:02.504241 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got ack seq 4 2016-03-08 15:19:02.504243 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader reading tag... 
2016-03-08 15:19:02.504245 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got MSG 2016-03-08 15:19:02.504248 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got envelope type=50 src client.4097 front=80 data=0 off 0 2016-03-08 15:19:02.504253 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader wants 80 bytes from policy throttler 0/104857600 2016-03-08 15:19:02.504257 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader wants 80 from dispatch throttler 0/104857600 2016-03-08 15:19:02.504262 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got front 80 2016-03-08 15:19:02.504265 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).aborted = 0 2016-03-08 15:19:02.504267 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got 80 + 0 + 0 byte message 2016-03-08 15:19:02.504279 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got message 4 0x559cd15561c0 mon_command({"prefix": "get_command_descriptions"} v 0) v1 2016-03-08 15:19:02.504284 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd15561c0 prio 127 2016-03-08 15:19:02.504289 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader reading tag... 2016-03-08 15:19:02.504298 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.504302 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4097 127.0.0.1:0/1829523028 4 ==== mon_command({"prefix": "get_command_descriptions"} v 0) v1 ==== 80+0+0 (3134233619 0 0) 0x559cd15561c0 con 0x559cd1478400 2016-03-08 15:19:02.504312 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).write_ack 4 2016-03-08 15:19:02.504320 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.504323 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer sleeping 2016-03-08 15:19:02.504332 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d7200 for client.? 
127.0.0.1:0/1829523028 2016-03-08 15:19:02.504336 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.505471 7f82a5352700 2 mon.a@0(leader) e1 send_reply 0x559cd17752a0 0x559cd1555200 mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 2016-03-08 15:19:02.505474 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/1829523028 -- mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 -- ?+37345 0x559cd1555200 con 0x559cd1478400 2016-03-08 15:19:02.505478 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 remote, 127.0.0.1:0/1829523028, have pipe. 2016-03-08 15:19:02.505507 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 80 to dispatch throttler 80/104857600 2016-03-08 15:19:02.505502 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.505510 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd15561c0 2016-03-08 15:19:02.505512 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer encoding 5 features 576460752303423487 0x559cd1555200 mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 2016-03-08 15:19:02.505523 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer signed seq # 5): sig = 0 2016-03-08 15:19:02.505530 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer sending 5 0x559cd1555200 2016-03-08 15:19:02.505573 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.505581 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer sleeping 2016-03-08 15:19:02.554304 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got ACK 2016-03-08 15:19:02.554320 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got ack seq 5 2016-03-08 15:19:02.554333 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader reading tag... 
2016-03-08 15:19:02.554335 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got MSG 2016-03-08 15:19:02.554338 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got envelope type=50 src client.4097 front=143 data=0 off 0 2016-03-08 15:19:02.554342 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader wants 143 bytes from policy throttler 0/104857600 2016-03-08 15:19:02.554346 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader wants 143 from dispatch throttler 0/104857600 2016-03-08 15:19:02.554350 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got front 143 2016-03-08 15:19:02.554353 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).aborted = 0 2016-03-08 15:19:02.554355 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got 143 + 0 + 0 byte message 2016-03-08 15:19:02.554365 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader got message 5 0x559cd1555f80 mon_command({"pool2": "rbd", "prefix": "osd pool delete", "sure": "--yes-i-really-really-mean-it", "pool": "rbd"} v 0) v1 2016-03-08 15:19:02.554381 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1555f80 prio 127 2016-03-08 15:19:02.554388 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader reading tag... 2016-03-08 15:19:02.554390 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.554396 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4097 127.0.0.1:0/1829523028 5 ==== mon_command({"pool2": "rbd", "prefix": "osd pool delete", "sure": "--yes-i-really-really-mean-it", "pool": "rbd"} v 0) v1 ==== 143+0+0 (2990663796 0 0) 0x559cd1555f80 con 0x559cd1478400 2016-03-08 15:19:02.554400 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).write_ack 5 2016-03-08 15:19:02.554407 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.554408 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer sleeping 2016-03-08 15:19:02.554420 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d7200 for client.? 
127.0.0.1:0/1829523028 2016-03-08 15:19:02.554433 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.554475 7f82a5352700 0 mon.a@0(leader) e1 handle_command mon_command({"pool2": "rbd", "prefix": "osd pool delete", "sure": "--yes-i-really-really-mean-it", "pool": "rbd"} v 0) v1 2016-03-08 15:19:02.554503 7f82a5352700 20 is_capable service=osd command=osd pool delete read write on cap allow * 2016-03-08 15:19:02.554505 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.554506 7f82a5352700 20 allow all 2016-03-08 15:19:02.554506 7f82a5352700 10 mon.a@0(leader) e1 _allowed_command capable 2016-03-08 15:19:02.554512 7f82a5352700 0 log_channel(audit) log [INF] : from='client.? 127.0.0.1:0/1829523028' entity='client.admin' cmd=[{"pool2": "rbd", "prefix": "osd pool delete", "sure": "--yes-i-really-really-mean-it", "pool": "rbd"}]: dispatch 2016-03-08 15:19:02.554525 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 8 at 2016-03-08 15:19:02.554514) v1 -- ?+0 0x559cd17cb180 con 0x559cd1477080 2016-03-08 15:19:02.554529 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 8 at 2016-03-08 15:19:02.554514) v1 local 2016-03-08 15:19:02.554537 7f82a5352700 10 mon.a@0(leader).paxosservice(osdmap 1..1) dispatch 0x559cd1555f80 mon_command({"pool2": "rbd", "prefix": "osd pool delete", "sure": "--yes-i-really-really-mean-it", "pool": "rbd"} v 0) v1 from client.4097 127.0.0.1:0/1829523028 con 0x559cd1478400 2016-03-08 15:19:02.554552 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..7) is_readable = 1 - now=2016-03-08 15:19:02.554552 lease_expire=0.000000 has v0 lc 7 2016-03-08 15:19:02.554574 7f82a5352700 10 mon.a@0(leader).osd e1 preprocess_query mon_command({"pool2": "rbd", "prefix": "osd pool delete", "sure": "--yes-i-really-really-mean-it", "pool": "rbd"} v 0) v1 from client.4097 127.0.0.1:0/1829523028 2016-03-08 15:19:02.554613 7f82a5352700 7 mon.a@0(leader).osd e1 prepare_update mon_command({"pool2": "rbd", "prefix": "osd pool delete", "sure": "--yes-i-really-really-mean-it", "pool": "rbd"} v 0) v1 from client.4097 127.0.0.1:0/1829523028 2016-03-08 15:19:02.554645 7f82a5352700 10 mon.a@0(leader).osd e1 _prepare_remove_pool 0 2016-03-08 15:19:02.554657 7f82a5352700 10 mon.a@0(leader).osd e1 should_propose 2016-03-08 15:19:02.554659 7f82a5352700 10 mon.a@0(leader).paxosservice(osdmap 1..1) propose_pending 2016-03-08 15:19:02.554660 7f82a5352700 10 mon.a@0(leader).osd e1 encode_pending e 2 2016-03-08 15:19:02.554700 7f82a5352700 20 mon.a@0(leader).osd e1 full_crc 3685405828 inc_crc 3205496377 2016-03-08 15:19:02.554707 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..7) queue_pending_finisher 0x559cd14944a0 2016-03-08 15:19:02.554710 7f82a5352700 10 mon.a@0(leader).paxos(paxos active c 1..7) trigger_propose active, proposing now 2016-03-08 15:19:02.554713 7f82a5352700 10 mon.a@0(leader).paxos(paxos active c 1..7) propose_pending 8 883 bytes 2016-03-08 15:19:02.554715 7f82a5352700 10 mon.a@0(leader).paxos(paxos updating c 1..7) begin for 8 883 bytes 2016-03-08 15:19:02.562762 7f82a5352700 10 mon.a@0(leader).paxos(paxos updating c 1..7) commit_start 8 2016-03-08 15:19:02.562783 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 143 to dispatch throttler 143/104857600 2016-03-08 15:19:02.562789 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1555f80 2016-03-08 15:19:02.562792 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 8 at 
2016-03-08 15:19:02.554514) v1 ==== 0+0+0 (0 0 0) 0x559cd17cb180 con 0x559cd1477080 2016-03-08 15:19:02.562818 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.562822 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.562829 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..3) dispatch 0x559cd17cb180 log(1 entries from seq 8 at 2016-03-08 15:19:02.554514) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.562834 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 1..7) is_readable = 1 - now=2016-03-08 15:19:02.562835 lease_expire=0.000000 has v0 lc 7 2016-03-08 15:19:02.562848 7f82a5352700 10 mon.a@0(leader).log v3 preprocess_query log(1 entries from seq 8 at 2016-03-08 15:19:02.554514) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.562860 7f82a5352700 10 mon.a@0(leader).log v3 preprocess_log log(1 entries from seq 8 at 2016-03-08 15:19:02.554514) v1 from mon.0 2016-03-08 15:19:02.562864 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.562865 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.562866 7f82a5352700 20 allow all 2016-03-08 15:19:02.562871 7f82a5352700 10 mon.a@0(leader).log v3 prepare_update log(1 entries from seq 8 at 2016-03-08 15:19:02.554514) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.562876 7f82a5352700 10 mon.a@0(leader).log v3 prepare_log log(1 entries from seq 8 at 2016-03-08 15:19:02.554514) v1 from mon.0 2016-03-08 15:19:02.562878 7f82a5352700 10 mon.a@0(leader).log v3 logging 2016-03-08 15:19:02.554514 mon.0 127.0.0.1:7104/0 8 : audit [INF] from='client.? 127.0.0.1:0/1829523028' entity='client.admin' cmd=[{"pool2": "rbd", "prefix": "osd pool delete", "sure": "--yes-i-really-really-mean-it", "pool": "rbd"}]: dispatch 2016-03-08 15:19:02.562891 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..3) setting proposal_timer 0x559cd1494390 with delay of 0.0253534 2016-03-08 15:19:02.562901 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd17cb180 2016-03-08 15:19:02.569060 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..7) commit_finish 8 2016-03-08 15:19:02.569085 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:02.569100 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..2) refresh 2016-03-08 15:19:02.569111 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:02.569118 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..2) refresh 2016-03-08 15:19:02.569120 7f82a6b55700 15 mon.a@0(leader).osd e1 update_from_paxos paxos e 2, my e 1 2016-03-08 15:19:02.569130 7f82a6b55700 7 mon.a@0(leader).osd e1 update_from_paxos applying incremental 2 2016-03-08 15:19:02.569154 7f82a6b55700 1 mon.a@0(leader).osd e2 e2: 0 osds: 0 up, 0 in 2016-03-08 15:19:02.576115 7f82a6b55700 5 mon.a@0(leader).paxos(paxos refresh c 1..8) is_readable = 0 - now=2016-03-08 15:19:02.576117 lease_expire=0.000000 has v0 lc 8 2016-03-08 15:19:02.576126 7f82a6b55700 10 mon.a@0(leader).pg v2 check_osd_map -- osdmap not readable, waiting 2016-03-08 15:19:02.576128 7f82a6b55700 10 mon.a@0(leader).osd e2 check_subs 2016-03-08 15:19:02.576130 7f82a6b55700 10 mon.a@0(leader).osd e2 share_map_with_random_osd no up osds, don't share with anyone 2016-03-08 15:19:02.576131 7f82a6b55700 10 mon.a@0(leader).osd e2 update_logger 2016-03-08 15:19:02.576134 7f82a6b55700 0 mon.a@0(leader).osd e2 crush map has features 33816576, adjusting msgr requires 
2016-03-08 15:19:02.576136 7f82a6b55700 0 mon.a@0(leader).osd e2 crush map has features 33816576, adjusting msgr requires 2016-03-08 15:19:02.576138 7f82a6b55700 0 mon.a@0(leader).osd e2 crush map has features 33816576, adjusting msgr requires 2016-03-08 15:19:02.576139 7f82a6b55700 0 mon.a@0(leader).osd e2 crush map has features 33816576, adjusting msgr requires 2016-03-08 15:19:02.576156 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..3) refresh 2016-03-08 15:19:02.576157 7f82a6b55700 10 mon.a@0(leader).log v3 update_from_paxos 2016-03-08 15:19:02.576159 7f82a6b55700 10 mon.a@0(leader).log v3 update_from_paxos version 3 summary v 3 2016-03-08 15:19:02.576167 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:02.576175 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) refresh 2016-03-08 15:19:02.576178 7f82a6b55700 10 mon.a@0(leader).auth v2 update_from_paxos 2016-03-08 15:19:02.576179 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..2) post_refresh 2016-03-08 15:19:02.576181 7f82a6b55700 10 mon.a@0(leader).pg v2 post_paxos_update 2016-03-08 15:19:02.576182 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:02.576182 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..2) post_refresh 2016-03-08 15:19:02.576183 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..3) post_refresh 2016-03-08 15:19:02.576184 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:02.576184 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) post_refresh 2016-03-08 15:19:02.576185 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..8) commit_proposal 2016-03-08 15:19:02.576188 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..2) _active - not active 2016-03-08 15:19:02.576189 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..8) finish_round 2016-03-08 15:19:02.576190 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..8) finish_round waiting_for_acting 2016-03-08 15:19:02.576193 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..2) _active 2016-03-08 15:19:02.576194 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..2) remove_legacy_versions 2016-03-08 15:19:02.576198 7f82a6b55700 7 mon.a@0(leader).paxosservice(osdmap 1..2) _active creating new pending 2016-03-08 15:19:02.576201 7f82a6b55700 10 mon.a@0(leader).osd e2 create_pending e 3 2016-03-08 15:19:02.576220 7f82a6b55700 0 log_channel(audit) log [INF] : from='client.? 
127.0.0.1:0/1829523028' entity='client.admin' cmd='[{"pool2": "rbd", "prefix": "osd pool delete", "sure": "--yes-i-really-really-mean-it", "pool": "rbd"}]': finished 2016-03-08 15:19:02.576231 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 9 at 2016-03-08 15:19:02.576222) v1 -- ?+0 0x559cd17cb400 con 0x559cd1477080 2016-03-08 15:19:02.576235 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 9 at 2016-03-08 15:19:02.576222) v1 local 2016-03-08 15:19:02.576244 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd1775cc0 0x559cd1556400 mon_command_ack([{"pool2": "rbd", "prefix": "osd pool delete", "sure": "--yes-i-really-really-mean-it", "pool": "rbd"}]=0 pool 'rbd' removed v2) v1 2016-03-08 15:19:02.576246 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/1829523028 -- mon_command_ack([{"pool2": "rbd", "prefix": "osd pool delete", "sure": "--yes-i-really-really-mean-it", "pool": "rbd"}]=0 pool 'rbd' removed v2) v1 -- ?+0 0x559cd1556400 con 0x559cd1478400 2016-03-08 15:19:02.576249 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message mon_command_ack([{"pool2": "rbd", "prefix": "osd pool delete", "sure": "--yes-i-really-really-mean-it", "pool": "rbd"}]=0 pool 'rbd' removed v2) v1 remote, 127.0.0.1:0/1829523028, have pipe. 2016-03-08 15:19:02.576270 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..8) is_readable = 1 - now=2016-03-08 15:19:02.576271 lease_expire=0.000000 has v0 lc 8 2016-03-08 15:19:02.576275 7f82a6b55700 10 mon.a@0(leader).pg v2 check_osd_map applying osdmap e2 to pg_map 2016-03-08 15:19:02.576273 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.576281 7f82a6b55700 10 mon.a@0(leader).pg v2 map_pg_creates to 64 pgs, osdmap epoch 2 2016-03-08 15:19:02.576270 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 9 at 2016-03-08 15:19:02.576222) v1 ==== 0+0+0 (0 0 0) 0x559cd17cb400 con 0x559cd1477080 2016-03-08 15:19:02.576283 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer encoding 6 features 576460752303423487 0x559cd1556400 mon_command_ack([{"pool2": "rbd", "prefix": "osd pool delete", "sure": "--yes-i-really-really-mean-it", "pool": "rbd"}]=0 pool 'rbd' removed v2) v1 2016-03-08 15:19:02.576292 7f82a6b55700 10 mon.a@0(leader).pg v2 register_new_pgs checking pg pools for osdmap epoch 2, last_pg_scan 1 2016-03-08 15:19:02.576293 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.0 because containing pool deleted 2016-03-08 15:19:02.576294 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.1 because containing pool deleted 2016-03-08 15:19:02.576295 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.2 because containing pool deleted 2016-03-08 15:19:02.576296 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.3 because containing pool deleted 2016-03-08 15:19:02.576297 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.4 because containing pool deleted 2016-03-08 15:19:02.576297 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.5 because containing pool deleted 2016-03-08 15:19:02.576298 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.6 because containing pool deleted 2016-03-08 15:19:02.576299 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.7 because containing pool 
deleted 2016-03-08 15:19:02.576300 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.8 because containing pool deleted 2016-03-08 15:19:02.576300 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.9 because containing pool deleted 2016-03-08 15:19:02.576298 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer signed seq # 6): sig = 0 2016-03-08 15:19:02.576301 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.a because containing pool deleted 2016-03-08 15:19:02.576302 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.b because containing pool deleted 2016-03-08 15:19:02.576303 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.c because containing pool deleted 2016-03-08 15:19:02.576302 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer sending 6 0x559cd1556400 2016-03-08 15:19:02.576304 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.d because containing pool deleted 2016-03-08 15:19:02.576305 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.e because containing pool deleted 2016-03-08 15:19:02.576306 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.f because containing pool deleted 2016-03-08 15:19:02.576306 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.10 because containing pool deleted 2016-03-08 15:19:02.576307 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.11 because containing pool deleted 2016-03-08 15:19:02.576308 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.12 because containing pool deleted 2016-03-08 15:19:02.576309 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.13 because containing pool deleted 2016-03-08 15:19:02.576309 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.14 because containing pool deleted 2016-03-08 15:19:02.576310 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.15 because containing pool deleted 2016-03-08 15:19:02.576311 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.16 because containing pool deleted 2016-03-08 15:19:02.576311 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.17 because containing pool deleted 2016-03-08 15:19:02.576312 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.18 because containing pool deleted 2016-03-08 15:19:02.576313 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.19 because containing pool deleted 2016-03-08 15:19:02.576313 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.1a because containing pool deleted 2016-03-08 15:19:02.576314 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.1b because containing pool deleted 2016-03-08 15:19:02.576315 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.1c because containing pool deleted 2016-03-08 15:19:02.576316 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.1d because containing pool deleted 2016-03-08 15:19:02.576317 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.1e because containing pool deleted 2016-03-08 15:19:02.576317 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.1f because containing pool deleted 2016-03-08 15:19:02.576318 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.20 because containing pool deleted 2016-03-08 15:19:02.576319 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.21 because 
containing pool deleted 2016-03-08 15:19:02.576320 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.22 because containing pool deleted 2016-03-08 15:19:02.576321 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.23 because containing pool deleted 2016-03-08 15:19:02.576321 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.24 because containing pool deleted 2016-03-08 15:19:02.576322 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.25 because containing pool deleted 2016-03-08 15:19:02.576323 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.26 because containing pool deleted 2016-03-08 15:19:02.576323 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.27 because containing pool deleted 2016-03-08 15:19:02.576324 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.28 because containing pool deleted 2016-03-08 15:19:02.576325 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.29 because containing pool deleted 2016-03-08 15:19:02.576325 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.2a because containing pool deleted 2016-03-08 15:19:02.576326 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.2b because containing pool deleted 2016-03-08 15:19:02.576327 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.2c because containing pool deleted 2016-03-08 15:19:02.576328 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.2d because containing pool deleted 2016-03-08 15:19:02.576329 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.2e because containing pool deleted 2016-03-08 15:19:02.576329 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.2f because containing pool deleted 2016-03-08 15:19:02.576337 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.30 because containing pool deleted 2016-03-08 15:19:02.576338 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.31 because containing pool deleted 2016-03-08 15:19:02.576336 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer: state = open policy.server=1 2016-03-08 15:19:02.576339 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.32 because containing pool deleted 2016-03-08 15:19:02.576340 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.33 because containing pool deleted 2016-03-08 15:19:02.576340 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.34 because containing pool deleted 2016-03-08 15:19:02.576339 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).writer sleeping 2016-03-08 15:19:02.576341 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.35 because containing pool deleted 2016-03-08 15:19:02.576343 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.36 because containing pool deleted 2016-03-08 15:19:02.576343 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.37 because containing pool deleted 2016-03-08 15:19:02.576344 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.38 because containing pool deleted 2016-03-08 15:19:02.576345 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.39 because containing pool deleted 2016-03-08 15:19:02.576345 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.3a because containing pool deleted 2016-03-08 15:19:02.576346 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 
0.3b because containing pool deleted 2016-03-08 15:19:02.576347 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.3c because containing pool deleted 2016-03-08 15:19:02.576349 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.3d because containing pool deleted 2016-03-08 15:19:02.576350 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.3e because containing pool deleted 2016-03-08 15:19:02.576351 7f82a6b55700 20 mon.a@0(leader).pg v2 removing creating_pg 0.3f because containing pool deleted 2016-03-08 15:19:02.576352 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.3f because containing pool deleted 2016-03-08 15:19:02.576353 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.3e because containing pool deleted 2016-03-08 15:19:02.576353 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.3d because containing pool deleted 2016-03-08 15:19:02.576354 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.3c because containing pool deleted 2016-03-08 15:19:02.576355 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.3b because containing pool deleted 2016-03-08 15:19:02.576355 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.3a because containing pool deleted 2016-03-08 15:19:02.576356 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.39 because containing pool deleted 2016-03-08 15:19:02.576357 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.38 because containing pool deleted 2016-03-08 15:19:02.576357 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.37 because containing pool deleted 2016-03-08 15:19:02.576358 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.36 because containing pool deleted 2016-03-08 15:19:02.576359 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.35 because containing pool deleted 2016-03-08 15:19:02.576359 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.34 because containing pool deleted 2016-03-08 15:19:02.576360 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.33 because containing pool deleted 2016-03-08 15:19:02.576361 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.32 because containing pool deleted 2016-03-08 15:19:02.576361 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.31 because containing pool deleted 2016-03-08 15:19:02.576362 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.30 because containing pool deleted 2016-03-08 15:19:02.576362 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.2f because containing pool deleted 2016-03-08 15:19:02.576363 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.2e because containing pool deleted 2016-03-08 15:19:02.576364 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.15 because containing pool deleted 2016-03-08 15:19:02.576364 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.14 because containing pool deleted 2016-03-08 15:19:02.576365 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.13 because containing pool deleted 2016-03-08 15:19:02.576366 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.12 because containing pool deleted 2016-03-08 15:19:02.576367 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.11 because containing pool deleted 2016-03-08 15:19:02.576367 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.10 because containing pool deleted 2016-03-08 15:19:02.576368 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.f because containing pool deleted 2016-03-08 15:19:02.576369 7f82a6b55700 20 
mon.a@0(leader).pg v2 removing pg_stat 0.e because containing pool deleted 2016-03-08 15:19:02.576370 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.d because containing pool deleted 2016-03-08 15:19:02.576371 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.c because containing pool deleted 2016-03-08 15:19:02.576372 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.b because containing pool deleted 2016-03-08 15:19:02.576373 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.a because containing pool deleted 2016-03-08 15:19:02.576374 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.3 because containing pool deleted 2016-03-08 15:19:02.576375 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.2 because containing pool deleted 2016-03-08 15:19:02.576376 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.1 because containing pool deleted 2016-03-08 15:19:02.576377 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.0 because containing pool deleted 2016-03-08 15:19:02.576378 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.4 because containing pool deleted 2016-03-08 15:19:02.576379 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.5 because containing pool deleted 2016-03-08 15:19:02.576380 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.6 because containing pool deleted 2016-03-08 15:19:02.576382 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.7 because containing pool deleted 2016-03-08 15:19:02.576383 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.8 because containing pool deleted 2016-03-08 15:19:02.576384 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.9 because containing pool deleted 2016-03-08 15:19:02.576385 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.16 because containing pool deleted 2016-03-08 15:19:02.576386 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.17 because containing pool deleted 2016-03-08 15:19:02.576388 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.18 because containing pool deleted 2016-03-08 15:19:02.576389 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.19 because containing pool deleted 2016-03-08 15:19:02.576390 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.1a because containing pool deleted 2016-03-08 15:19:02.576390 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.1b because containing pool deleted 2016-03-08 15:19:02.576391 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.1c because containing pool deleted 2016-03-08 15:19:02.576391 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.1d because containing pool deleted 2016-03-08 15:19:02.576397 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.1e because containing pool deleted 2016-03-08 15:19:02.576398 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.1f because containing pool deleted 2016-03-08 15:19:02.576399 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.20 because containing pool deleted 2016-03-08 15:19:02.576399 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.21 because containing pool deleted 2016-03-08 15:19:02.576400 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.22 because containing pool deleted 2016-03-08 15:19:02.576400 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.23 because containing pool deleted 2016-03-08 15:19:02.576401 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.24 because containing pool deleted 2016-03-08 15:19:02.576402 7f82a6b55700 20 
mon.a@0(leader).pg v2 removing pg_stat 0.25 because containing pool deleted 2016-03-08 15:19:02.576402 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.26 because containing pool deleted 2016-03-08 15:19:02.576403 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.27 because containing pool deleted 2016-03-08 15:19:02.576404 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.28 because containing pool deleted 2016-03-08 15:19:02.576404 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.29 because containing pool deleted 2016-03-08 15:19:02.576405 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.2a because containing pool deleted 2016-03-08 15:19:02.576406 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.2b because containing pool deleted 2016-03-08 15:19:02.576406 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.2c because containing pool deleted 2016-03-08 15:19:02.576407 7f82a6b55700 20 mon.a@0(leader).pg v2 removing pg_stat 0.2d because containing pool deleted 2016-03-08 15:19:02.576408 7f82a6b55700 10 mon.a@0(leader).pg v2 register_new_pgs registered 0 new pgs, removed 128 uncreated pgs 2016-03-08 15:19:02.576409 7f82a6b55700 10 mon.a@0(leader).pg v2 check_down_pgs 2016-03-08 15:19:02.576415 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..2) propose_pending 2016-03-08 15:19:02.576417 7f82a6b55700 10 mon.a@0(leader).pg v2 encode_pending v 3 2016-03-08 15:19:02.576458 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..8) queue_pending_finisher 0x559cd14944a0 2016-03-08 15:19:02.576459 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..8) trigger_propose active, proposing now 2016-03-08 15:19:02.576478 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..8) propose_pending 9 3786 bytes 2016-03-08 15:19:02.576479 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..8) begin for 9 3786 bytes 2016-03-08 15:19:02.582697 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..8) commit_start 9 2016-03-08 15:19:02.582739 7f82a6b55700 10 mon.a@0(leader).osd e2 update_logger 2016-03-08 15:19:02.582745 7f82a6b55700 0 log_channel(cluster) log [INF] : osdmap e2: 0 osds: 0 up, 0 in 2016-03-08 15:19:02.582758 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 10 at 2016-03-08 15:19:02.582746) v1 -- ?+0 0x559cd1561e00 con 0x559cd1477080 2016-03-08 15:19:02.582764 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 10 at 2016-03-08 15:19:02.582746) v1 local 2016-03-08 15:19:02.582771 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..8) finish_round waiting_for_readable 2016-03-08 15:19:02.582772 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..8) finish_round waiting_for_writeable 2016-03-08 15:19:02.582773 7f82a6b55700 10 mon.a@0(leader).paxos(paxos writing c 1..8) finish_round done w/ waiters, state 4 2016-03-08 15:19:02.582821 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.582826 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.582841 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..3) dispatch 0x559cd17cb400 log(1 entries from seq 9 at 2016-03-08 15:19:02.576222) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.582846 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 1..8) is_readable = 1 - now=2016-03-08 15:19:02.582846 lease_expire=0.000000 has v0 lc 8 2016-03-08 15:19:02.582857 7f82a5352700 10 mon.a@0(leader).log v3 
preprocess_query log(1 entries from seq 9 at 2016-03-08 15:19:02.576222) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.582869 7f82a5352700 10 mon.a@0(leader).log v3 preprocess_log log(1 entries from seq 9 at 2016-03-08 15:19:02.576222) v1 from mon.0 2016-03-08 15:19:02.582872 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.582874 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.582875 7f82a5352700 20 allow all 2016-03-08 15:19:02.582879 7f82a5352700 10 mon.a@0(leader).log v3 prepare_update log(1 entries from seq 9 at 2016-03-08 15:19:02.576222) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.582886 7f82a5352700 10 mon.a@0(leader).log v3 prepare_log log(1 entries from seq 9 at 2016-03-08 15:19:02.576222) v1 from mon.0 2016-03-08 15:19:02.582888 7f82a5352700 10 mon.a@0(leader).log v3 logging 2016-03-08 15:19:02.576222 mon.0 127.0.0.1:7104/0 9 : audit [INF] from='client.? 127.0.0.1:0/1829523028' entity='client.admin' cmd='[{"pool2": "rbd", "prefix": "osd pool delete", "sure": "--yes-i-really-really-mean-it", "pool": "rbd"}]': finished 2016-03-08 15:19:02.582895 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..3) proposal_timer already set 2016-03-08 15:19:02.582897 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd17cb400 2016-03-08 15:19:02.582900 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 10 at 2016-03-08 15:19:02.582746) v1 ==== 0+0+0 (0 0 0) 0x559cd1561e00 con 0x559cd1477080 2016-03-08 15:19:02.582923 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.582926 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.582932 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..3) dispatch 0x559cd1561e00 log(1 entries from seq 10 at 2016-03-08 15:19:02.582746) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.582937 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 1..8) is_readable = 1 - now=2016-03-08 15:19:02.582937 lease_expire=0.000000 has v0 lc 8 2016-03-08 15:19:02.582942 7f82a5352700 10 mon.a@0(leader).log v3 preprocess_query log(1 entries from seq 10 at 2016-03-08 15:19:02.582746) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.582948 7f82a5352700 10 mon.a@0(leader).log v3 preprocess_log log(1 entries from seq 10 at 2016-03-08 15:19:02.582746) v1 from mon.0 2016-03-08 15:19:02.582965 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.582966 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.582969 7f82a5352700 20 allow all 2016-03-08 15:19:02.582974 7f82a5352700 10 mon.a@0(leader).log v3 prepare_update log(1 entries from seq 10 at 2016-03-08 15:19:02.582746) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.582979 7f82a5352700 10 mon.a@0(leader).log v3 prepare_log log(1 entries from seq 10 at 2016-03-08 15:19:02.582746) v1 from mon.0 2016-03-08 15:19:02.582982 7f82a5352700 10 mon.a@0(leader).log v3 logging 2016-03-08 15:19:02.582746 mon.0 127.0.0.1:7104/0 10 : cluster [INF] osdmap e2: 0 osds: 0 up, 0 in 2016-03-08 15:19:02.582990 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..3) proposal_timer already set 2016-03-08 15:19:02.582993 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1561e00 2016-03-08 15:19:02.586626 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).reader 
couldn't read tag, (0) Success 2016-03-08 15:19:02.586647 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).fault (0) Success 2016-03-08 15:19:02.586687 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).fault on lossy channel, failing 2016-03-08 15:19:02.586693 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1478400).stop 2016-03-08 15:19:02.586703 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1478400).unregister_pipe 2016-03-08 15:19:02.586709 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1478400).discard_queue 2016-03-08 15:19:02.586719 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1478400).reader done 2016-03-08 15:19:02.586728 7f82a5352700 10 mon.a@0(leader) e1 ms_handle_reset 0x559cd1478400 127.0.0.1:0/1829523028 2016-03-08 15:19:02.586726 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1478400).writer finishing 2016-03-08 15:19:02.586735 7f82a5352700 10 mon.a@0(leader) e1 reset/close on session client.? 127.0.0.1:0/1829523028 2016-03-08 15:19:02.586738 7f82a5352700 10 mon.a@0(leader) e1 remove_session 0x559cd14d7200 client.? 127.0.0.1:0/1829523028 2016-03-08 15:19:02.586738 7f82a334e700 10 -- 127.0.0.1:7104/0 queue_reap 0x559cd17aa000 2016-03-08 15:19:02.586745 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1478400).writer done 2016-03-08 15:19:02.586768 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper 2016-03-08 15:19:02.586780 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper reaping pipe 0x559cd17aa000 127.0.0.1:0/1829523028 2016-03-08 15:19:02.586784 7f82a6354700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1478400).discard_queue 2016-03-08 15:19:02.586790 7f82a6354700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1478400).unregister_pipe - not registered 2016-03-08 15:19:02.586795 7f82a6354700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1829523028 pipe(0x559cd17aa000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1478400).join 2016-03-08 15:19:02.586806 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper reaped pipe 0x559cd17aa000 127.0.0.1:0/1829523028 2016-03-08 15:19:02.586812 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper deleted pipe 0x559cd17aa000 2016-03-08 15:19:02.586814 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper done 2016-03-08 15:19:02.588328 7f82a5b53700 10 mon.a@0(leader).paxosservice(logm 1..3) propose_pending 2016-03-08 15:19:02.588336 7f82a5b53700 10 mon.a@0(leader).log v3 encode_full log v 3 2016-03-08 15:19:02.588350 7f82a5b53700 10 mon.a@0(leader).log v3 encode_pending v4 2016-03-08 15:19:02.588359 7f82a5b53700 5 mon.a@0(leader).paxos(paxos writing c 1..8) queue_pending_finisher 0x559cd1494200 2016-03-08 15:19:02.588362 7f82a5b53700 10 mon.a@0(leader).paxos(paxos writing c 1..8) trigger_propose not active, will propose later 2016-03-08 15:19:02.589171 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..8) commit_finish 9 2016-03-08 15:19:02.589200 7f82a6b55700 10 
mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:02.589215 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..3) refresh 2016-03-08 15:19:02.589218 7f82a6b55700 10 mon.a@0(leader).pg v2 update_from_paxos read_incremental 2016-03-08 15:19:02.589233 7f82a6b55700 20 mon.a@0(leader).pg v2 refreshing pg 0.0 got -2 len 0 2016-03-08 15:19:02.589288 7f82a6b55700 20 mon.a@0(leader).pg v2 deleted pool 0 2016-03-08 15:19:02.589292 7f82a6b55700 10 mon.a@0(leader).pg v3 read_pgmap_meta 2016-03-08 15:19:02.589307 7f82a6b55700 10 mon.a@0(leader).pg v3 update_logger 2016-03-08 15:19:02.589317 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:02.589326 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..2) refresh 2016-03-08 15:19:02.589335 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..3) refresh 2016-03-08 15:19:02.589338 7f82a6b55700 10 mon.a@0(leader).log v3 update_from_paxos 2016-03-08 15:19:02.589339 7f82a6b55700 10 mon.a@0(leader).log v3 update_from_paxos version 3 summary v 3 2016-03-08 15:19:02.589347 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:02.589355 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) refresh 2016-03-08 15:19:02.589358 7f82a6b55700 10 mon.a@0(leader).auth v2 update_from_paxos 2016-03-08 15:19:02.589360 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..3) post_refresh 2016-03-08 15:19:02.589361 7f82a6b55700 10 mon.a@0(leader).pg v3 post_paxos_update 2016-03-08 15:19:02.589362 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:02.589362 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..2) post_refresh 2016-03-08 15:19:02.589371 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..3) post_refresh 2016-03-08 15:19:02.589372 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:02.589373 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) post_refresh 2016-03-08 15:19:02.589374 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..9) commit_proposal 2016-03-08 15:19:02.589376 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..3) _active - not active 2016-03-08 15:19:02.589378 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..9) finish_round 2016-03-08 15:19:02.589379 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..9) finish_round waiting_for_acting 2016-03-08 15:19:02.589380 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..3) _active 2016-03-08 15:19:02.589381 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..3) remove_legacy_versions 2016-03-08 15:19:02.589385 7f82a6b55700 7 mon.a@0(leader).paxosservice(pgmap 1..3) _active creating new pending 2016-03-08 15:19:02.589390 7f82a6b55700 10 mon.a@0(leader).pg v3 create_pending v 4 2016-03-08 15:19:02.589391 7f82a6b55700 10 mon.a@0(leader).pg v3 check_osd_map already seen 2 >= 2 2016-03-08 15:19:02.589392 7f82a6b55700 10 mon.a@0(leader).pg v3 update_logger 2016-03-08 15:19:02.589401 7f82a6b55700 0 log_channel(cluster) log [INF] : pgmap v3: 0 pgs: ; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:02.589419 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 11 at 2016-03-08 15:19:02.589402) v1 -- ?+0 0x559cd17caa00 con 0x559cd1477080 2016-03-08 15:19:02.589426 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 11 at 2016-03-08 15:19:02.589402) v1 local 2016-03-08 15:19:02.589432 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..9) finish_round waiting_for_readable 
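The burst of "removing pg_stat ... because containing pool deleted" entries and the empty pgmap v3 committed above are the monitor reacting to the deletion of the default "rbd" pool, which the audit entries in this trace record as an "osd pool delete" command from client.admin. A minimal shell sketch of the same operation, assuming the test's CEPH_CONF/CEPH_ARGS environment is exported the way the harness sets it up:

    # the pool name must be repeated and the safety flag supplied verbatim
    ceph osd pool delete rbd rbd --yes-i-really-really-mean-it
    # afterwards the pgmap should report zero placement groups
    ceph pg stat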
2016-03-08 15:19:02.589435 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..9) finish_round waiting_for_writeable 2016-03-08 15:19:02.589436 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..9) finish_round done w/ waiters, state 1 2016-03-08 15:19:02.589442 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..9) propose_pending 10 2947 bytes 2016-03-08 15:19:02.589444 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..9) begin for 10 2947 bytes 2016-03-08 15:19:02.589473 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 11 at 2016-03-08 15:19:02.589402) v1 ==== 0+0+0 (0 0 0) 0x559cd17caa00 con 0x559cd1477080 2016-03-08 15:19:02.595976 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..9) commit_start 10 2016-03-08 15:19:02.596055 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.596065 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.596086 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..3) dispatch 0x559cd17caa00 log(1 entries from seq 11 at 2016-03-08 15:19:02.589402) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.596091 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 1..9) is_readable = 1 - now=2016-03-08 15:19:02.596092 lease_expire=0.000000 has v0 lc 9 2016-03-08 15:19:02.596101 7f82a5352700 10 mon.a@0(leader).log v3 preprocess_query log(1 entries from seq 11 at 2016-03-08 15:19:02.589402) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.596109 7f82a5352700 10 mon.a@0(leader).log v3 preprocess_log log(1 entries from seq 11 at 2016-03-08 15:19:02.589402) v1 from mon.0 2016-03-08 15:19:02.596113 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.596115 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.596115 7f82a5352700 20 allow all 2016-03-08 15:19:02.596118 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..3) waiting for paxos -> writeable 2016-03-08 15:19:02.596132 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd17caa00 2016-03-08 15:19:02.602216 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..9) commit_finish 10 2016-03-08 15:19:02.602247 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:02.602260 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..3) refresh 2016-03-08 15:19:02.602268 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:02.602283 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..2) refresh 2016-03-08 15:19:02.602290 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..4) refresh 2016-03-08 15:19:02.602291 7f82a6b55700 10 mon.a@0(leader).log v4 update_from_paxos 2016-03-08 15:19:02.602293 7f82a6b55700 10 mon.a@0(leader).log v4 update_from_paxos version 4 summary v 3 2016-03-08 15:19:02.602296 7f82a6b55700 10 mon.a@0(leader).log v4 update_from_paxos latest full 3 2016-03-08 15:19:02.602303 7f82a6b55700 7 mon.a@0(leader).log v4 update_from_paxos applying incremental log 4 2016-03-08 15:19:02.554514 mon.0 127.0.0.1:7104/0 8 : audit [INF] from='client.? 
127.0.0.1:0/1829523028' entity='client.admin' cmd=[{"pool2": "rbd", "prefix": "osd pool delete", "sure": "--yes-i-really-really-mean-it", "pool": "rbd"}]: dispatch 2016-03-08 15:19:02.602317 7f82a6b55700 20 mon.a@0(leader).log v4 update_from_paxos logging for channel 'audit' to file 'testdir/osd-crush/log' 2016-03-08 15:19:02.602325 7f82a6b55700 7 mon.a@0(leader).log v4 update_from_paxos applying incremental log 4 2016-03-08 15:19:02.576222 mon.0 127.0.0.1:7104/0 9 : audit [INF] from='client.? 127.0.0.1:0/1829523028' entity='client.admin' cmd='[{"pool2": "rbd", "prefix": "osd pool delete", "sure": "--yes-i-really-really-mean-it", "pool": "rbd"}]': finished 2016-03-08 15:19:02.602329 7f82a6b55700 20 mon.a@0(leader).log v4 update_from_paxos logging for channel 'audit' to file 'testdir/osd-crush/log' 2016-03-08 15:19:02.602334 7f82a6b55700 7 mon.a@0(leader).log v4 update_from_paxos applying incremental log 4 2016-03-08 15:19:02.582746 mon.0 127.0.0.1:7104/0 10 : cluster [INF] osdmap e2: 0 osds: 0 up, 0 in 2016-03-08 15:19:02.602337 7f82a6b55700 20 mon.a@0(leader).log v4 update_from_paxos logging for channel 'cluster' to file 'testdir/osd-crush/log' 2016-03-08 15:19:02.602341 7f82a6b55700 15 mon.a@0(leader).log v4 update_from_paxos logging for 2 channels 2016-03-08 15:19:02.602342 7f82a6b55700 15 mon.a@0(leader).log v4 update_from_paxos channel 'audit' logging 492 bytes 2016-03-08 15:19:02.602357 7f82a6b55700 15 mon.a@0(leader).log v4 update_from_paxos channel 'cluster' logging 99 bytes 2016-03-08 15:19:02.602362 7f82a6b55700 10 mon.a@0(leader).log v4 check_subs 2016-03-08 15:19:02.602374 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:02.602381 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) refresh 2016-03-08 15:19:02.602382 7f82a6b55700 10 mon.a@0(leader).auth v2 update_from_paxos 2016-03-08 15:19:02.602384 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..3) post_refresh 2016-03-08 15:19:02.602385 7f82a6b55700 10 mon.a@0(leader).pg v3 post_paxos_update 2016-03-08 15:19:02.602386 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:02.602387 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..2) post_refresh 2016-03-08 15:19:02.602389 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..4) post_refresh 2016-03-08 15:19:02.602389 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:02.602390 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) post_refresh 2016-03-08 15:19:02.602391 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..10) commit_proposal 2016-03-08 15:19:02.602393 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..4) _active - not active 2016-03-08 15:19:02.602395 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..10) finish_round 2016-03-08 15:19:02.602403 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..10) finish_round waiting_for_acting 2016-03-08 15:19:02.602404 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..4) _active 2016-03-08 15:19:02.602405 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..4) remove_legacy_versions 2016-03-08 15:19:02.602409 7f82a6b55700 7 mon.a@0(leader).paxosservice(logm 1..4) _active creating new pending 2016-03-08 15:19:02.602412 7f82a6b55700 10 mon.a@0(leader).log v4 create_pending v 5 2016-03-08 15:19:02.602420 7f82a6b55700 7 mon.a@0(leader).log v4 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.602428 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd1775600 0x559cd1555f80 
log(last 8) v1 2016-03-08 15:19:02.602431 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 8) v1 -- ?+0 0x559cd1555f80 con 0x559cd1477080 2016-03-08 15:19:02.602434 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 8) v1 local 2016-03-08 15:19:02.602455 7f82a6b55700 7 mon.a@0(leader).log v4 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.602466 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd17e6480 0x559cd15546c0 log(last 9) v1 2016-03-08 15:19:02.602468 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 9) v1 -- ?+0 0x559cd15546c0 con 0x559cd1477080 2016-03-08 15:19:02.602470 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 9) v1 local 2016-03-08 15:19:02.602469 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 8) v1 ==== 0+0+0 (0 0 0) 0x559cd1555f80 con 0x559cd1477080 2016-03-08 15:19:02.602484 7f82a6b55700 7 mon.a@0(leader).log v4 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.602489 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd17e6a20 0x559cd1556640 log(last 10) v1 2016-03-08 15:19:02.602491 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 10) v1 -- ?+0 0x559cd1556640 con 0x559cd1477080 2016-03-08 15:19:02.602493 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 10) v1 local 2016-03-08 15:19:02.602510 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..4) dispatch 0x559cd17caa00 log(1 entries from seq 11 at 2016-03-08 15:19:02.589402) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.602514 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..10) is_readable = 1 - now=2016-03-08 15:19:02.602514 lease_expire=0.000000 has v0 lc 10 2016-03-08 15:19:02.602522 7f82a6b55700 10 mon.a@0(leader).log v4 preprocess_query log(1 entries from seq 11 at 2016-03-08 15:19:02.589402) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.602532 7f82a6b55700 10 mon.a@0(leader).log v4 preprocess_log log(1 entries from seq 11 at 2016-03-08 15:19:02.589402) v1 from mon.0 2016-03-08 15:19:02.602535 7f82a6b55700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.602537 7f82a6b55700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.602538 7f82a6b55700 20 allow all 2016-03-08 15:19:02.602542 7f82a6b55700 10 mon.a@0(leader).log v4 prepare_update log(1 entries from seq 11 at 2016-03-08 15:19:02.589402) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.602547 7f82a6b55700 10 mon.a@0(leader).log v4 prepare_log log(1 entries from seq 11 at 2016-03-08 15:19:02.589402) v1 from mon.0 2016-03-08 15:19:02.602550 7f82a6b55700 10 mon.a@0(leader).log v4 logging 2016-03-08 15:19:02.589402 mon.0 127.0.0.1:7104/0 11 : cluster [INF] pgmap v3: 0 pgs: ; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:02.602559 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..4) setting proposal_timer 0x559cd1494200 with delay of 0.0996678 2016-03-08 15:19:02.602570 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..10) finish_round waiting_for_readable 2016-03-08 15:19:02.602572 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..10) finish_round waiting_for_writeable 2016-03-08 15:19:02.602573 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..10) finish_round done w/ waiters, state 1 2016-03-08 15:19:02.602597 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.602601 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.602605 
7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:02.602606 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.602608 7f82a5352700 20 allow all 2016-03-08 15:19:02.602621 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1555f80 2016-03-08 15:19:02.602626 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 9) v1 ==== 0+0+0 (0 0 0) 0x559cd15546c0 con 0x559cd1477080 2016-03-08 15:19:02.602664 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.602667 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.602671 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:02.602671 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.602672 7f82a5352700 20 allow all 2016-03-08 15:19:02.602679 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd15546c0 2016-03-08 15:19:02.602681 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 10) v1 ==== 0+0+0 (0 0 0) 0x559cd1556640 con 0x559cd1477080 2016-03-08 15:19:02.602693 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.602696 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.602698 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:02.602699 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.602699 7f82a5352700 20 allow all 2016-03-08 15:19:02.602706 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1556640 2016-03-08 15:19:02.684652 7f82a4350700 20 accepter.accepter poll got 1 2016-03-08 15:19:02.684664 7f82a4350700 10 accepter.pfd.revents=1 2016-03-08 15:19:02.684671 7f82a4350700 10 accepter.accepted incoming on sd 21 2016-03-08 15:19:02.684705 7f82a4350700 20 accepter.accepter calling poll 2016-03-08 15:19:02.684735 7f82aea26700 10 -- 127.0.0.1:7104/0 >> :/0 pipe(0x559cd17ab400 sd=21 :0 s=0 pgs=0 cs=0 l=0 c=0x559cd1479480).accept 2016-03-08 15:19:02.684782 7f82aea26700 1 -- 127.0.0.1:7104/0 >> :/0 pipe(0x559cd17ab400 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd1479480).accept sd=21 127.0.0.1:52220/0 2016-03-08 15:19:02.684858 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd1479480).accept peer addr is 127.0.0.1:0/2094244407 2016-03-08 15:19:02.684869 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd1479480).accept got peer connect_seq 0 global_seq 1 2016-03-08 15:19:02.684873 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1479480).accept of host_type 8, policy.lossy=1 policy.server=1 policy.standby=0 policy.resetcheck=0 2016-03-08 15:19:02.684876 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1479480).accept my proto 15, their proto 15 2016-03-08 15:19:02.684879 7f82aea26700 10 mon.a@0(leader) e1 ms_verify_authorizer 127.0.0.1:0/2094244407 client protocol 0 2016-03-08 15:19:02.684882 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1479480).accept: setting up session_security. 
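The accepter/pipe lines above show a new client (127.0.0.1:0/2094244407) connecting through the simple messenger with debug-ms 20 enabled, which is why every read, ack and keepalive is traced. A practical way to follow a single client's traffic in a log this verbose is to filter on its address; the path below assumes the monitor's $name log-file metavariable expanded to mon.a:

    # isolate one client's messenger traffic (path assumes the log file is mon.a.log)
    grep '127.0.0.1:0/2094244407' testdir/osd-crush/mon.a.log | less
    # count how many messages the mon read off the wire from all peers
    grep -c 'reader got message' testdir/osd-crush/mon.a.log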
2016-03-08 15:19:02.684885 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1479480).accept new session 2016-03-08 15:19:02.684887 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).accept success, connect_seq = 1, sending READY 2016-03-08 15:19:02.684889 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).accept features 576460752303423487 2016-03-08 15:19:02.684896 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).register_pipe 2016-03-08 15:19:02.684915 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).discard_requeued_up_to 0 2016-03-08 15:19:02.684920 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).accept starting writer, state open 2016-03-08 15:19:02.684933 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).accept done 2016-03-08 15:19:02.684937 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader reading tag... 2016-03-08 15:19:02.684940 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.684964 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer sleeping 2016-03-08 15:19:02.684969 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got KEEPALIVE2 2016-03-08 15:19:02.684947 2016-03-08 15:19:02.684985 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader reading tag... 2016-03-08 15:19:02.684998 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got MSG 2016-03-08 15:19:02.685004 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got envelope type=17 src client.? 
front=60 data=0 off 0 2016-03-08 15:19:02.685002 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.685007 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader wants 60 bytes from policy throttler 0/104857600 2016-03-08 15:19:02.685010 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader wants 60 from dispatch throttler 0/104857600 2016-03-08 15:19:02.685014 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got front 60 2016-03-08 15:19:02.685009 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).write_keepalive2 15 2016-03-08 15:19:02.684947 2016-03-08 15:19:02.685017 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).aborted = 0 2016-03-08 15:19:02.685019 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got 60 + 0 + 0 byte message 2016-03-08 15:19:02.685026 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got message 1 0x559cd1563c00 auth(proto 0 30 bytes epoch 0) v1 2016-03-08 15:19:02.685033 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1563c00 prio 127 2016-03-08 15:19:02.685039 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader reading tag... 2016-03-08 15:19:02.685044 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).write_ack 1 2016-03-08 15:19:02.685043 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.? 127.0.0.1:0/2094244407 1 ==== auth(proto 0 30 bytes epoch 0) v1 ==== 60+0+0 (900162395 0 0) 0x559cd1563c00 con 0x559cd1479480 2016-03-08 15:19:02.685057 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.685061 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer sleeping 2016-03-08 15:19:02.685081 7f82a5352700 10 mon.a@0(leader) e1 _ms_dispatch new session 0x559cd14d7200 MonSession(client.? 127.0.0.1:0/2094244407 is open) 2016-03-08 15:19:02.685086 7f82a5352700 20 mon.a@0(leader) e1 caps 2016-03-08 15:19:02.685091 7f82a5352700 10 mon.a@0(leader).paxosservice(auth 1..2) dispatch 0x559cd1563c00 auth(proto 0 30 bytes epoch 0) v1 from client.? 127.0.0.1:0/2094244407 con 0x559cd1479480 2016-03-08 15:19:02.685097 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..10) is_readable = 1 - now=2016-03-08 15:19:02.685097 lease_expire=0.000000 has v0 lc 10 2016-03-08 15:19:02.685101 7f82a5352700 10 mon.a@0(leader).auth v2 preprocess_query auth(proto 0 30 bytes epoch 0) v1 from client.? 
127.0.0.1:0/2094244407 2016-03-08 15:19:02.685105 7f82a5352700 10 mon.a@0(leader).auth v2 prep_auth() blob_size=30 2016-03-08 15:19:02.685111 7f82a5352700 10 mon.a@0(leader).auth v2 AuthMonitor::assign_global_id m=auth(proto 0 30 bytes epoch 0) v1 mon=0/1 last_allocated=4097 max_global_id=14096 2016-03-08 15:19:02.685113 7f82a5352700 10 mon.a@0(leader).auth v2 next_global_id should be 4098 2016-03-08 15:19:02.685118 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2094244407 -- mon_map magic: 0 v1 -- ?+0 0x559cd15558c0 con 0x559cd1479480 2016-03-08 15:19:02.685122 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_map magic: 0 v1 remote, 127.0.0.1:0/2094244407, have pipe. 2016-03-08 15:19:02.685129 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.685137 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer encoding 1 features 576460752303423487 0x559cd15558c0 mon_map magic: 0 v1 2016-03-08 15:19:02.685136 7f82a5352700 2 mon.a@0(leader) e1 send_reply 0x559cd17e7320 0x559cd1562300 auth_reply(proto 1 0 (0) Success) v1 2016-03-08 15:19:02.685146 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2094244407 -- auth_reply(proto 1 0 (0) Success) v1 -- ?+0 0x559cd1562300 con 0x559cd1479480 2016-03-08 15:19:02.685147 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer signed seq # 1): sig = 0 2016-03-08 15:19:02.685157 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer sending 1 0x559cd15558c0 2016-03-08 15:19:02.685168 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message auth_reply(proto 1 0 (0) Success) v1 remote, 127.0.0.1:0/2094244407, have pipe. 
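The auth(proto 0) exchange and AuthMonitor::assign_global_id above show the client authenticating with authentication disabled (proto 0 is AUTH_NONE) and being handed global_id 4098. As a hypothetical sanity check, the running monitor's auth setting could be queried over its admin socket; the socket path assumes the $cluster-$name metavariables expanded to ceph-mon.a, and auth_supported is the legacy option name the harness sets:

    # confirm authentication is disabled on the running mon (expected value: none)
    ceph daemon testdir/osd-crush/ceph-mon.a.asok config get auth_supported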
2016-03-08 15:19:02.685186 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.685190 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer encoding 2 features 576460752303423487 0x559cd1562300 auth_reply(proto 1 0 (0) Success) v1 2016-03-08 15:19:02.685196 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 60 to dispatch throttler 60/104857600 2016-03-08 15:19:02.685197 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer signed seq # 2): sig = 0 2016-03-08 15:19:02.685199 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1563c00 2016-03-08 15:19:02.685200 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer sending 2 0x559cd1562300 2016-03-08 15:19:02.685213 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.685224 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer sleeping 2016-03-08 15:19:02.685341 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got ACK 2016-03-08 15:19:02.685352 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got ack seq 1 2016-03-08 15:19:02.685355 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader reading tag... 2016-03-08 15:19:02.685357 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got ACK 2016-03-08 15:19:02.685359 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got ack seq 2 2016-03-08 15:19:02.685361 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader reading tag... 2016-03-08 15:19:02.685363 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got MSG 2016-03-08 15:19:02.685367 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got envelope type=15 src client.? 
front=23 data=0 off 0 2016-03-08 15:19:02.685371 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader wants 23 bytes from policy throttler 0/104857600 2016-03-08 15:19:02.685375 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader wants 23 from dispatch throttler 0/104857600 2016-03-08 15:19:02.685381 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got front 23 2016-03-08 15:19:02.685386 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).aborted = 0 2016-03-08 15:19:02.685388 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got 23 + 0 + 0 byte message 2016-03-08 15:19:02.685396 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got message 2 0x559cd14d7e00 mon_subscribe({monmap=0+}) v2 2016-03-08 15:19:02.685400 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd14d7e00 prio 127 2016-03-08 15:19:02.685404 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader reading tag... 2016-03-08 15:19:02.685407 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.685411 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.? 127.0.0.1:0/2094244407 2 ==== mon_subscribe({monmap=0+}) v2 ==== 23+0+0 (1620593354 0 0) 0x559cd14d7e00 con 0x559cd1479480 2016-03-08 15:19:02.685419 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).write_ack 2 2016-03-08 15:19:02.685421 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got MSG 2016-03-08 15:19:02.685426 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.685430 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got envelope type=15 src client.4098 front=23 data=0 off 0 2016-03-08 15:19:02.685430 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer sleeping 2016-03-08 15:19:02.685433 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d7200 for client.? 
127.0.0.1:0/2094244407 2016-03-08 15:19:02.685433 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader wants 23 bytes from policy throttler 23/104857600 2016-03-08 15:19:02.685435 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.685436 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader wants 23 from dispatch throttler 23/104857600 2016-03-08 15:19:02.685438 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:02.685440 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.685440 7f82a5352700 20 allow all 2016-03-08 15:19:02.685439 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got front 23 2016-03-08 15:19:02.685441 7f82a5352700 10 mon.a@0(leader) e1 handle_subscribe mon_subscribe({monmap=0+}) v2 2016-03-08 15:19:02.685441 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).aborted = 0 2016-03-08 15:19:02.685447 7f82a5352700 10 mon.a@0(leader) e1 check_sub monmap next 0 have 1 2016-03-08 15:19:02.685447 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got 23 + 0 + 0 byte message 2016-03-08 15:19:02.685451 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2094244407 -- mon_map magic: 0 v1 -- ?+0 0x559cd15561c0 con 0x559cd1479480 2016-03-08 15:19:02.685453 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_map magic: 0 v1 remote, 127.0.0.1:0/2094244407, have pipe. 
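The mon_subscribe({monmap=0+}) request and the mon_map reply above are the client keeping its monitor map current before issuing any command. A quick way to see the same monmap the subscription returns, under the same test environment:

    # print the current monmap (epoch 1, single mon.a at 127.0.0.1:7104)
    ceph mon dump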
2016-03-08 15:19:02.685460 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.685463 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 23 to dispatch throttler 46/104857600 2016-03-08 15:19:02.685465 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd14d7e00 2016-03-08 15:19:02.685466 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer encoding 3 features 576460752303423487 0x559cd15561c0 mon_map magic: 0 v1 2016-03-08 15:19:02.685473 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer signed seq # 3): sig = 0 2016-03-08 15:19:02.685480 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer sending 3 0x559cd15561c0 2016-03-08 15:19:02.685499 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.685504 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer sleeping 2016-03-08 15:19:02.685530 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got message 3 0x559cd1856000 mon_subscribe({osdmap=0}) v2 2016-03-08 15:19:02.685545 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1856000 prio 127 2016-03-08 15:19:02.685552 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader reading tag... 2016-03-08 15:19:02.685554 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.685559 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).write_ack 3 2016-03-08 15:19:02.685555 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4098 127.0.0.1:0/2094244407 3 ==== mon_subscribe({osdmap=0}) v2 ==== 23+0+0 (319581110 0 0) 0x559cd1856000 con 0x559cd1479480 2016-03-08 15:19:02.685565 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.685568 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer sleeping 2016-03-08 15:19:02.685580 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d7200 for client.? 
127.0.0.1:0/2094244407 2016-03-08 15:19:02.685582 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.685585 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:02.685586 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.685587 7f82a5352700 20 allow all 2016-03-08 15:19:02.685588 7f82a5352700 10 mon.a@0(leader) e1 handle_subscribe mon_subscribe({osdmap=0}) v2 2016-03-08 15:19:02.685590 7f82a5352700 20 is_capable service=osd command= read on cap allow * 2016-03-08 15:19:02.685591 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.685591 7f82a5352700 20 allow all 2016-03-08 15:19:02.685592 7f82a5352700 10 mon.a@0(leader).osd e2 check_sub 0x559cd16f96c0 next 0 (onetime) 2016-03-08 15:19:02.685596 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2094244407 -- osd_map(2..2 src has 1..2) v3 -- ?+0 0x559cd1563c00 con 0x559cd1479480 2016-03-08 15:19:02.685599 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message osd_map(2..2 src has 1..2) v3 remote, 127.0.0.1:0/2094244407, have pipe. 2016-03-08 15:19:02.685605 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.685608 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 23 to dispatch throttler 23/104857600 2016-03-08 15:19:02.685610 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1856000 2016-03-08 15:19:02.685610 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer encoding 4 features 576460752303423487 0x559cd1563c00 osd_map(2..2 src has 1..2) v3 2016-03-08 15:19:02.685619 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer signed seq # 4): sig = 0 2016-03-08 15:19:02.685624 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer sending 4 0x559cd1563c00 2016-03-08 15:19:02.685648 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.685653 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer sleeping 2016-03-08 15:19:02.687119 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got ACK 2016-03-08 15:19:02.687129 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got ack seq 3 2016-03-08 15:19:02.687132 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader reading tag... 
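The mon_subscribe({osdmap=0}) above is a one-time subscription, answered with osd_map(2..2), i.e. just epoch 2, which still contains no OSDs. Equivalent CLI checks, as a sketch under the same environment:

    # human-readable dump of the current osdmap (epoch 2, 0 osds, no pools yet)
    ceph osd dump
    # or fetch the binary map for a specific epoch and decode it offline
    ceph osd getmap 2 -o /tmp/osdmap.2 && osdmaptool --print /tmp/osdmap.2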
2016-03-08 15:19:02.687134 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got ACK 2016-03-08 15:19:02.687136 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got ack seq 4 2016-03-08 15:19:02.687138 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader reading tag... 2016-03-08 15:19:02.687140 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got MSG 2016-03-08 15:19:02.687143 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got envelope type=50 src client.4098 front=80 data=0 off 0 2016-03-08 15:19:02.687146 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader wants 80 bytes from policy throttler 0/104857600 2016-03-08 15:19:02.687149 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader wants 80 from dispatch throttler 0/104857600 2016-03-08 15:19:02.687152 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got front 80 2016-03-08 15:19:02.687155 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).aborted = 0 2016-03-08 15:19:02.687157 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got 80 + 0 + 0 byte message 2016-03-08 15:19:02.687165 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got message 4 0x559cd15558c0 mon_command({"prefix": "get_command_descriptions"} v 0) v1 2016-03-08 15:19:02.687168 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd15558c0 prio 127 2016-03-08 15:19:02.687172 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader reading tag... 2016-03-08 15:19:02.687175 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.687177 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4098 127.0.0.1:0/2094244407 4 ==== mon_command({"prefix": "get_command_descriptions"} v 0) v1 ==== 80+0+0 (3134233619 0 0) 0x559cd15558c0 con 0x559cd1479480 2016-03-08 15:19:02.687184 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).write_ack 4 2016-03-08 15:19:02.687190 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.687195 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer sleeping 2016-03-08 15:19:02.687204 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d7200 for client.? 
127.0.0.1:0/2094244407 2016-03-08 15:19:02.687208 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.688405 7f82a5352700 2 mon.a@0(leader) e1 send_reply 0x559cd17e7d40 0x559cd1555f80 mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 2016-03-08 15:19:02.688409 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2094244407 -- mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 -- ?+37345 0x559cd1555f80 con 0x559cd1479480 2016-03-08 15:19:02.688413 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 remote, 127.0.0.1:0/2094244407, have pipe. 2016-03-08 15:19:02.688421 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.688425 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 80 to dispatch throttler 80/104857600 2016-03-08 15:19:02.688433 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd15558c0 2016-03-08 15:19:02.688431 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer encoding 5 features 576460752303423487 0x559cd1555f80 mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 2016-03-08 15:19:02.688445 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer signed seq # 5): sig = 0 2016-03-08 15:19:02.688452 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer sending 5 0x559cd1555f80 2016-03-08 15:19:02.688482 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.688487 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer sleeping 2016-03-08 15:19:02.702311 7f82a5b53700 10 mon.a@0(leader).paxosservice(logm 1..4) propose_pending 2016-03-08 15:19:02.702323 7f82a5b53700 10 mon.a@0(leader).log v4 encode_full log v 4 2016-03-08 15:19:02.702348 7f82a5b53700 10 mon.a@0(leader).log v4 encode_pending v5 2016-03-08 15:19:02.702365 7f82a5b53700 5 mon.a@0(leader).paxos(paxos active c 1..10) queue_pending_finisher 0x559cd1494390 2016-03-08 15:19:02.702369 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..10) trigger_propose active, proposing now 2016-03-08 15:19:02.702380 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..10) propose_pending 11 3192 bytes 2016-03-08 15:19:02.702381 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..10) begin for 11 3192 bytes 2016-03-08 15:19:02.710321 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..10) commit_start 11 2016-03-08 15:19:02.716784 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..10) commit_finish 11 2016-03-08 15:19:02.716815 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:02.716828 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..3) refresh 2016-03-08 15:19:02.716836 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:02.716843 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..2) refresh 2016-03-08 15:19:02.716850 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..5) refresh 2016-03-08 15:19:02.716852 
7f82a6b55700 10 mon.a@0(leader).log v5 update_from_paxos 2016-03-08 15:19:02.716853 7f82a6b55700 10 mon.a@0(leader).log v5 update_from_paxos version 5 summary v 4 2016-03-08 15:19:02.716857 7f82a6b55700 10 mon.a@0(leader).log v5 update_from_paxos latest full 4 2016-03-08 15:19:02.716863 7f82a6b55700 7 mon.a@0(leader).log v5 update_from_paxos applying incremental log 5 2016-03-08 15:19:02.589402 mon.0 127.0.0.1:7104/0 11 : cluster [INF] pgmap v3: 0 pgs: ; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:02.716876 7f82a6b55700 20 mon.a@0(leader).log v5 update_from_paxos logging for channel 'cluster' to file 'testdir/osd-crush/log' 2016-03-08 15:19:02.716883 7f82a6b55700 15 mon.a@0(leader).log v5 update_from_paxos logging for 1 channels 2016-03-08 15:19:02.716885 7f82a6b55700 15 mon.a@0(leader).log v5 update_from_paxos channel 'cluster' logging 131 bytes 2016-03-08 15:19:02.716901 7f82a6b55700 10 mon.a@0(leader).log v5 check_subs 2016-03-08 15:19:02.716912 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:02.716919 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) refresh 2016-03-08 15:19:02.716920 7f82a6b55700 10 mon.a@0(leader).auth v2 update_from_paxos 2016-03-08 15:19:02.716922 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..3) post_refresh 2016-03-08 15:19:02.716923 7f82a6b55700 10 mon.a@0(leader).pg v3 post_paxos_update 2016-03-08 15:19:02.716925 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:02.716925 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..2) post_refresh 2016-03-08 15:19:02.716926 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..5) post_refresh 2016-03-08 15:19:02.716927 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:02.716927 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) post_refresh 2016-03-08 15:19:02.716928 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..11) commit_proposal 2016-03-08 15:19:02.716930 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..5) _active - not active 2016-03-08 15:19:02.716938 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..11) finish_round 2016-03-08 15:19:02.716939 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..11) finish_round waiting_for_acting 2016-03-08 15:19:02.716940 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..5) _active 2016-03-08 15:19:02.716941 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..5) remove_legacy_versions 2016-03-08 15:19:02.716944 7f82a6b55700 7 mon.a@0(leader).paxosservice(logm 1..5) _active creating new pending 2016-03-08 15:19:02.716947 7f82a6b55700 10 mon.a@0(leader).log v5 create_pending v 6 2016-03-08 15:19:02.716960 7f82a6b55700 7 mon.a@0(leader).log v5 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.716965 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd17e6fc0 0x559cd1556880 log(last 11) v1 2016-03-08 15:19:02.716969 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 11) v1 -- ?+0 0x559cd1556880 con 0x559cd1477080 2016-03-08 15:19:02.716973 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 11) v1 local 2016-03-08 15:19:02.716992 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..11) finish_round waiting_for_readable 2016-03-08 15:19:02.716994 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..11) finish_round waiting_for_writeable 2016-03-08 15:19:02.716995 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..11) finish_round done w/ waiters, state 1 2016-03-08 
15:19:02.716993 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 11) v1 ==== 0+0+0 (0 0 0) 0x559cd1556880 con 0x559cd1477080 2016-03-08 15:19:02.717021 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.717025 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.717035 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:02.717037 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.717038 7f82a5352700 20 allow all 2016-03-08 15:19:02.717053 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1556880 2016-03-08 15:19:02.736256 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got ACK 2016-03-08 15:19:02.736292 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got ack seq 5 2016-03-08 15:19:02.736295 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader reading tag... 2016-03-08 15:19:02.736297 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got MSG 2016-03-08 15:19:02.736300 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got envelope type=50 src client.4098 front=99 data=0 off 0 2016-03-08 15:19:02.736302 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader wants 99 bytes from policy throttler 0/104857600 2016-03-08 15:19:02.736305 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader wants 99 from dispatch throttler 0/104857600 2016-03-08 15:19:02.736309 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got front 99 2016-03-08 15:19:02.736311 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).aborted = 0 2016-03-08 15:19:02.736313 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got 99 + 0 + 0 byte message 2016-03-08 15:19:02.736322 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader got message 5 0x559cd15561c0 mon_command({"prefix": "osd pool create", "pg_num": 4, "pool": "rbd"} v 0) v1 2016-03-08 15:19:02.736325 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd15561c0 prio 127 2016-03-08 15:19:02.736339 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader reading tag... 
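The message read off the wire above is the test's next step, recreating the pool it just deleted: mon_command({"prefix": "osd pool create", "pg_num": 4, "pool": "rbd"}). Its dispatch, capability check and the proposal_timer that briefly batches the resulting osdmap update before the paxos commit follow below. The equivalent interactive invocation, assuming the same exported test environment:

    # recreate the pool with 4 placement groups; the mon batches the osdmap change before committing it
    ceph osd pool create rbd 4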
2016-03-08 15:19:02.736343 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.736353 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).write_ack 5 2016-03-08 15:19:02.736348 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4098 127.0.0.1:0/2094244407 5 ==== mon_command({"prefix": "osd pool create", "pg_num": 4, "pool": "rbd"} v 0) v1 ==== 99+0+0 (2527081536 0 0) 0x559cd15561c0 con 0x559cd1479480 2016-03-08 15:19:02.736368 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.736370 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer sleeping 2016-03-08 15:19:02.736406 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d7200 for client.? 127.0.0.1:0/2094244407 2016-03-08 15:19:02.736410 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.736439 7f82a5352700 0 mon.a@0(leader) e1 handle_command mon_command({"prefix": "osd pool create", "pg_num": 4, "pool": "rbd"} v 0) v1 2016-03-08 15:19:02.736459 7f82a5352700 20 is_capable service=osd command=osd pool create read write on cap allow * 2016-03-08 15:19:02.736461 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.736462 7f82a5352700 20 allow all 2016-03-08 15:19:02.736463 7f82a5352700 10 mon.a@0(leader) e1 _allowed_command capable 2016-03-08 15:19:02.736470 7f82a5352700 0 log_channel(audit) log [INF] : from='client.? 127.0.0.1:0/2094244407' entity='client.admin' cmd=[{"prefix": "osd pool create", "pg_num": 4, "pool": "rbd"}]: dispatch 2016-03-08 15:19:02.736484 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 12 at 2016-03-08 15:19:02.736471) v1 -- ?+0 0x559cd1562080 con 0x559cd1477080 2016-03-08 15:19:02.736489 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 12 at 2016-03-08 15:19:02.736471) v1 local 2016-03-08 15:19:02.736510 7f82a5352700 10 mon.a@0(leader).paxosservice(osdmap 1..2) dispatch 0x559cd15561c0 mon_command({"prefix": "osd pool create", "pg_num": 4, "pool": "rbd"} v 0) v1 from client.4098 127.0.0.1:0/2094244407 con 0x559cd1479480 2016-03-08 15:19:02.736515 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..11) is_readable = 1 - now=2016-03-08 15:19:02.736515 lease_expire=0.000000 has v0 lc 11 2016-03-08 15:19:02.736532 7f82a5352700 10 mon.a@0(leader).osd e2 preprocess_query mon_command({"prefix": "osd pool create", "pg_num": 4, "pool": "rbd"} v 0) v1 from client.4098 127.0.0.1:0/2094244407 2016-03-08 15:19:02.736564 7f82a5352700 7 mon.a@0(leader).osd e2 prepare_update mon_command({"prefix": "osd pool create", "pg_num": 4, "pool": "rbd"} v 0) v1 from client.4098 127.0.0.1:0/2094244407 2016-03-08 15:19:02.744714 7f82a5352700 10 mon.a@0(leader).osd e2 should_propose 2016-03-08 15:19:02.744721 7f82a5352700 10 mon.a@0(leader).paxosservice(osdmap 1..2) setting proposal_timer 0x559cd14944b0 with delay of 0.0720727 2016-03-08 15:19:02.744745 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 99 to dispatch throttler 99/104857600 2016-03-08 15:19:02.744755 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd15561c0 2016-03-08 15:19:02.744765 7f82a5352700 1 -- 
127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 12 at 2016-03-08 15:19:02.736471) v1 ==== 0+0+0 (0 0 0) 0x559cd1562080 con 0x559cd1477080 2016-03-08 15:19:02.744809 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.744814 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.744826 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..5) dispatch 0x559cd1562080 log(1 entries from seq 12 at 2016-03-08 15:19:02.736471) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.744833 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..11) is_readable = 1 - now=2016-03-08 15:19:02.744835 lease_expire=0.000000 has v0 lc 11 2016-03-08 15:19:02.744848 7f82a5352700 10 mon.a@0(leader).log v5 preprocess_query log(1 entries from seq 12 at 2016-03-08 15:19:02.736471) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.744865 7f82a5352700 10 mon.a@0(leader).log v5 preprocess_log log(1 entries from seq 12 at 2016-03-08 15:19:02.736471) v1 from mon.0 2016-03-08 15:19:02.744872 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.744876 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.744877 7f82a5352700 20 allow all 2016-03-08 15:19:02.744887 7f82a5352700 10 mon.a@0(leader).log v5 prepare_update log(1 entries from seq 12 at 2016-03-08 15:19:02.736471) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.744906 7f82a5352700 10 mon.a@0(leader).log v5 prepare_log log(1 entries from seq 12 at 2016-03-08 15:19:02.736471) v1 from mon.0 2016-03-08 15:19:02.744912 7f82a5352700 10 mon.a@0(leader).log v5 logging 2016-03-08 15:19:02.736471 mon.0 127.0.0.1:7104/0 12 : audit [INF] from='client.? 
127.0.0.1:0/2094244407' entity='client.admin' cmd=[{"prefix": "osd pool create", "pg_num": 4, "pool": "rbd"}]: dispatch 2016-03-08 15:19:02.744935 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..5) setting proposal_timer 0x559cd14944c0 with delay of 0.0718565 2016-03-08 15:19:02.744944 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1562080 2016-03-08 15:19:02.816931 7f82a5b53700 10 mon.a@0(leader).paxosservice(logm 1..5) propose_pending 2016-03-08 15:19:02.816980 7f82a5b53700 10 mon.a@0(leader).log v5 encode_full log v 5 2016-03-08 15:19:02.817010 7f82a5b53700 10 mon.a@0(leader).log v5 encode_pending v6 2016-03-08 15:19:02.817018 7f82a5b53700 5 mon.a@0(leader).paxos(paxos active c 1..11) queue_pending_finisher 0x559cd1494200 2016-03-08 15:19:02.817020 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..11) trigger_propose active, proposing now 2016-03-08 15:19:02.817026 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..11) propose_pending 12 3508 bytes 2016-03-08 15:19:02.817028 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..11) begin for 12 3508 bytes 2016-03-08 15:19:02.825249 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..11) commit_start 12 2016-03-08 15:19:02.825298 7f82a5b53700 10 mon.a@0(leader).paxosservice(osdmap 1..2) propose_pending 2016-03-08 15:19:02.825301 7f82a5b53700 10 mon.a@0(leader).osd e2 encode_pending e 3 2016-03-08 15:19:02.825346 7f82a5b53700 20 mon.a@0(leader).osd e2 full_crc 1746400094 inc_crc 1699920343 2016-03-08 15:19:02.825356 7f82a5b53700 5 mon.a@0(leader).paxos(paxos writing c 1..11) queue_pending_finisher 0x559cd14944c0 2016-03-08 15:19:02.825358 7f82a5b53700 10 mon.a@0(leader).paxos(paxos writing c 1..11) trigger_propose not active, will propose later 2016-03-08 15:19:02.831811 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..11) commit_finish 12 2016-03-08 15:19:02.831842 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:02.831859 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..3) refresh 2016-03-08 15:19:02.831869 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:02.831876 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..2) refresh 2016-03-08 15:19:02.831886 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..6) refresh 2016-03-08 15:19:02.831888 7f82a6b55700 10 mon.a@0(leader).log v6 update_from_paxos 2016-03-08 15:19:02.831890 7f82a6b55700 10 mon.a@0(leader).log v6 update_from_paxos version 6 summary v 5 2016-03-08 15:19:02.831894 7f82a6b55700 10 mon.a@0(leader).log v6 update_from_paxos latest full 5 2016-03-08 15:19:02.831903 7f82a6b55700 7 mon.a@0(leader).log v6 update_from_paxos applying incremental log 6 2016-03-08 15:19:02.736471 mon.0 127.0.0.1:7104/0 12 : audit [INF] from='client.? 
127.0.0.1:0/2094244407' entity='client.admin' cmd=[{"prefix": "osd pool create", "pg_num": 4, "pool": "rbd"}]: dispatch 2016-03-08 15:19:02.831920 7f82a6b55700 20 mon.a@0(leader).log v6 update_from_paxos logging for channel 'audit' to file 'testdir/osd-crush/log' 2016-03-08 15:19:02.831929 7f82a6b55700 15 mon.a@0(leader).log v6 update_from_paxos logging for 1 channels 2016-03-08 15:19:02.831930 7f82a6b55700 15 mon.a@0(leader).log v6 update_from_paxos channel 'audit' logging 202 bytes 2016-03-08 15:19:02.831948 7f82a6b55700 10 mon.a@0(leader).log v6 check_subs 2016-03-08 15:19:02.831975 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:02.831985 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) refresh 2016-03-08 15:19:02.831988 7f82a6b55700 10 mon.a@0(leader).auth v2 update_from_paxos 2016-03-08 15:19:02.831989 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..3) post_refresh 2016-03-08 15:19:02.831991 7f82a6b55700 10 mon.a@0(leader).pg v3 post_paxos_update 2016-03-08 15:19:02.831992 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:02.831993 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..2) post_refresh 2016-03-08 15:19:02.831994 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..6) post_refresh 2016-03-08 15:19:02.831995 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:02.831995 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) post_refresh 2016-03-08 15:19:02.831996 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..12) commit_proposal 2016-03-08 15:19:02.831998 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..6) _active - not active 2016-03-08 15:19:02.832003 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..12) finish_round 2016-03-08 15:19:02.832004 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..12) finish_round waiting_for_acting 2016-03-08 15:19:02.832005 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..6) _active 2016-03-08 15:19:02.832006 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..6) remove_legacy_versions 2016-03-08 15:19:02.832010 7f82a6b55700 7 mon.a@0(leader).paxosservice(logm 1..6) _active creating new pending 2016-03-08 15:19:02.832017 7f82a6b55700 10 mon.a@0(leader).log v6 create_pending v 7 2016-03-08 15:19:02.832028 7f82a6b55700 7 mon.a@0(leader).log v6 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.832035 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd17e9cc0 0x559cd1555d40 log(last 12) v1 2016-03-08 15:19:02.832039 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 12) v1 -- ?+0 0x559cd1555d40 con 0x559cd1477080 2016-03-08 15:19:02.832042 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 12) v1 local 2016-03-08 15:19:02.832061 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..12) finish_round waiting_for_readable 2016-03-08 15:19:02.832064 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..12) finish_round waiting_for_writeable 2016-03-08 15:19:02.832066 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..12) finish_round done w/ waiters, state 1 2016-03-08 15:19:02.832070 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..12) propose_pending 13 1359 bytes 2016-03-08 15:19:02.832072 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..12) begin for 13 1359 bytes 2016-03-08 15:19:02.832083 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 12) v1 ==== 0+0+0 (0 0 0) 0x559cd1555d40 con 0x559cd1477080 
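Each [INF] entry in this trace is first committed through a Paxos round on the logm service and only then appended to the channel log file named in the "logging for channel ... to file" lines (testdir/osd-crush/log). A minimal way to follow the same entries outside the debug log, assuming the working directory used in this trace:

  # watch the cluster/audit log file the logm service writes to
  tail -f testdir/osd-crush/log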
2016-03-08 15:19:02.838204 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..12) commit_start 13 2016-03-08 15:19:02.838254 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.838260 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.838266 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:02.838269 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.838271 7f82a5352700 20 allow all 2016-03-08 15:19:02.838300 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1555d40 2016-03-08 15:19:02.844438 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..12) commit_finish 13 2016-03-08 15:19:02.844459 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:02.844472 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..3) refresh 2016-03-08 15:19:02.844481 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:02.844488 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..3) refresh 2016-03-08 15:19:02.844491 7f82a6b55700 15 mon.a@0(leader).osd e2 update_from_paxos paxos e 3, my e 2 2016-03-08 15:19:02.844503 7f82a6b55700 7 mon.a@0(leader).osd e2 update_from_paxos applying incremental 3 2016-03-08 15:19:02.844535 7f82a6b55700 1 mon.a@0(leader).osd e3 e3: 0 osds: 0 up, 0 in 2016-03-08 15:19:02.850965 7f82a6b55700 5 mon.a@0(leader).paxos(paxos refresh c 1..13) is_readable = 0 - now=2016-03-08 15:19:02.850969 lease_expire=0.000000 has v0 lc 13 2016-03-08 15:19:02.850979 7f82a6b55700 10 mon.a@0(leader).pg v3 check_osd_map -- osdmap not readable, waiting 2016-03-08 15:19:02.850982 7f82a6b55700 10 mon.a@0(leader).osd e3 check_subs 2016-03-08 15:19:02.850984 7f82a6b55700 10 mon.a@0(leader).osd e3 share_map_with_random_osd no up osds, don't share with anyone 2016-03-08 15:19:02.850985 7f82a6b55700 10 mon.a@0(leader).osd e3 update_logger 2016-03-08 15:19:02.850990 7f82a6b55700 0 mon.a@0(leader).osd e3 crush map has features 1107558400, adjusting msgr requires 2016-03-08 15:19:02.850992 7f82a6b55700 0 mon.a@0(leader).osd e3 crush map has features 1107558400, adjusting msgr requires 2016-03-08 15:19:02.850994 7f82a6b55700 0 mon.a@0(leader).osd e3 crush map has features 1107558400, adjusting msgr requires 2016-03-08 15:19:02.850995 7f82a6b55700 0 mon.a@0(leader).osd e3 crush map has features 1107558400, adjusting msgr requires 2016-03-08 15:19:02.851010 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..6) refresh 2016-03-08 15:19:02.851013 7f82a6b55700 10 mon.a@0(leader).log v6 update_from_paxos 2016-03-08 15:19:02.851014 7f82a6b55700 10 mon.a@0(leader).log v6 update_from_paxos version 6 summary v 6 2016-03-08 15:19:02.851023 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:02.851030 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) refresh 2016-03-08 15:19:02.851032 7f82a6b55700 10 mon.a@0(leader).auth v2 update_from_paxos 2016-03-08 15:19:02.851034 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..3) post_refresh 2016-03-08 15:19:02.851035 7f82a6b55700 10 mon.a@0(leader).pg v3 post_paxos_update 2016-03-08 15:19:02.851036 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:02.851037 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..3) post_refresh 2016-03-08 15:19:02.851037 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..6) post_refresh 2016-03-08 15:19:02.851038 7f82a6b55700 10 
mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:02.851038 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) post_refresh 2016-03-08 15:19:02.851039 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..13) commit_proposal 2016-03-08 15:19:02.851048 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..3) _active - not active 2016-03-08 15:19:02.851049 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..13) finish_round 2016-03-08 15:19:02.851050 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..13) finish_round waiting_for_acting 2016-03-08 15:19:02.851051 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..3) _active 2016-03-08 15:19:02.851052 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..3) remove_legacy_versions 2016-03-08 15:19:02.851056 7f82a6b55700 7 mon.a@0(leader).paxosservice(osdmap 1..3) _active creating new pending 2016-03-08 15:19:02.851060 7f82a6b55700 10 mon.a@0(leader).osd e3 create_pending e 4 2016-03-08 15:19:02.851086 7f82a6b55700 0 log_channel(audit) log [INF] : from='client.? 127.0.0.1:0/2094244407' entity='client.admin' cmd='[{"prefix": "osd pool create", "pg_num": 4, "pool": "rbd"}]': finished 2016-03-08 15:19:02.851097 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 13 at 2016-03-08 15:19:02.851088) v1 -- ?+0 0x559cd1562080 con 0x559cd1477080 2016-03-08 15:19:02.851103 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 13 at 2016-03-08 15:19:02.851088) v1 local 2016-03-08 15:19:02.851120 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd17e9180 0x559cd1555b00 mon_command_ack([{"prefix": "osd pool create", "pg_num": 4, "pool": "rbd"}]=0 pool 'rbd' created v3) v1 2016-03-08 15:19:02.851125 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2094244407 -- mon_command_ack([{"prefix": "osd pool create", "pg_num": 4, "pool": "rbd"}]=0 pool 'rbd' created v3) v1 -- ?+0 0x559cd1555b00 con 0x559cd1479480 2016-03-08 15:19:02.851129 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message mon_command_ack([{"prefix": "osd pool create", "pg_num": 4, "pool": "rbd"}]=0 pool 'rbd' created v3) v1 remote, 127.0.0.1:0/2094244407, have pipe. 
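The audit 'finished' entry and the mon_command_ack above ("pool 'rbd' created v3", retval 0) mark the point where the new pool is part of the just-committed osdmap epoch 3. A quick way to confirm the result from a client, sketched under the assumption that the monitor is still reachable on 127.0.0.1:7104:

  # list pools from the freshly committed osdmap
  ceph -m 127.0.0.1:7104 osd dump | grep '^pool'
  # the new pool should report the requested pg_num
  ceph -m 127.0.0.1:7104 osd pool get rbd pg_num    # expected: pg_num: 4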
2016-03-08 15:19:02.851146 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..13) is_readable = 1 - now=2016-03-08 15:19:02.851146 lease_expire=0.000000 has v0 lc 13 2016-03-08 15:19:02.851144 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 13 at 2016-03-08 15:19:02.851088) v1 ==== 0+0+0 (0 0 0) 0x559cd1562080 con 0x559cd1477080 2016-03-08 15:19:02.851154 7f82a6b55700 10 mon.a@0(leader).pg v3 check_osd_map applying osdmap e3 to pg_map 2016-03-08 15:19:02.851158 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.851174 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer encoding 6 features 576460752303423487 0x559cd1555b00 mon_command_ack([{"prefix": "osd pool create", "pg_num": 4, "pool": "rbd"}]=0 pool 'rbd' created v3) v1 2016-03-08 15:19:02.851183 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer signed seq # 6): sig = 0 2016-03-08 15:19:02.851188 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer sending 6 0x559cd1555b00 2016-03-08 15:19:02.851189 7f82a6b55700 10 mon.a@0(leader).pg v3 map_pg_creates to 0 pgs, osdmap epoch 3 2016-03-08 15:19:02.851191 7f82a6b55700 10 mon.a@0(leader).pg v3 register_new_pgs checking pg pools for osdmap epoch 3, last_pg_scan 2 2016-03-08 15:19:02.851192 7f82a6b55700 10 mon.a@0(leader).pg v3 register_new_pgs scanning pool 1 replicated size 3 min_size 2 crush_ruleset 0 object_hash rjenkins pg_num 4 pgp_num 4 last_change 3 flags hashpspool stripe_width 0 2016-03-08 15:19:02.851202 7f82a6b55700 10 mon.a@0(leader).pg v3 register_pg will create 1.0 primary -1 acting [] 2016-03-08 15:19:02.851205 7f82a6b55700 10 mon.a@0(leader).pg v3 register_pg will create 1.1 primary -1 acting [] 2016-03-08 15:19:02.851209 7f82a6b55700 10 mon.a@0(leader).pg v3 register_pg will create 1.2 primary -1 acting [] 2016-03-08 15:19:02.851210 7f82a6b55700 10 mon.a@0(leader).pg v3 register_pg will create 1.3 primary -1 acting [] 2016-03-08 15:19:02.851211 7f82a6b55700 10 mon.a@0(leader).pg v3 register_new_pgs registered 4 new pgs, removed 0 uncreated pgs 2016-03-08 15:19:02.851212 7f82a6b55700 10 mon.a@0(leader).pg v3 check_down_pgs 2016-03-08 15:19:02.851213 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..3) propose_pending 2016-03-08 15:19:02.851215 7f82a6b55700 10 mon.a@0(leader).pg v3 encode_pending v 4 2016-03-08 15:19:02.851230 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer: state = open policy.server=1 2016-03-08 15:19:02.851235 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).writer sleeping 2016-03-08 15:19:02.851245 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..13) queue_pending_finisher 0x559cd14944c0 2016-03-08 15:19:02.851248 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..13) trigger_propose active, proposing now 2016-03-08 15:19:02.851255 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..13) propose_pending 14 2810 bytes 2016-03-08 15:19:02.851257 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..13) begin for 14 2810 bytes 
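Because the pool was created with pg_num 4, the PGMonitor registers exactly four placement groups (1.0 through 1.3) for pool 1, each with primary -1 and an empty acting set: there are no OSDs in the map yet, so the PGs can only sit in the 'creating' state, matching the pgmap v4 cluster log entry further down. Roughly what a client would see at this point:

  # overall status; with no OSDs the new PGs stay in 'creating'
  ceph -m 127.0.0.1:7104 -s
  # per-PG detail for the four new PGs of pool 1
  ceph -m 127.0.0.1:7104 pg dump | grep '^1\.'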
2016-03-08 15:19:02.857596 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..13) commit_start 14 2016-03-08 15:19:02.857622 7f82a6b55700 10 mon.a@0(leader).osd e3 update_logger 2016-03-08 15:19:02.857627 7f82a6b55700 0 log_channel(cluster) log [INF] : osdmap e3: 0 osds: 0 up, 0 in 2016-03-08 15:19:02.857642 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 14 at 2016-03-08 15:19:02.857628) v1 -- ?+0 0x559cd17caa00 con 0x559cd1477080 2016-03-08 15:19:02.857648 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 14 at 2016-03-08 15:19:02.857628) v1 local 2016-03-08 15:19:02.857656 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..13) finish_round waiting_for_readable 2016-03-08 15:19:02.857657 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..13) finish_round waiting_for_writeable 2016-03-08 15:19:02.857660 7f82a6b55700 10 mon.a@0(leader).paxos(paxos writing c 1..13) finish_round done w/ waiters, state 4 2016-03-08 15:19:02.857706 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.857713 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.857733 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..6) dispatch 0x559cd1562080 log(1 entries from seq 13 at 2016-03-08 15:19:02.851088) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.857741 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 1..13) is_readable = 1 - now=2016-03-08 15:19:02.857742 lease_expire=0.000000 has v0 lc 13 2016-03-08 15:19:02.857758 7f82a5352700 10 mon.a@0(leader).log v6 preprocess_query log(1 entries from seq 13 at 2016-03-08 15:19:02.851088) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.857776 7f82a5352700 10 mon.a@0(leader).log v6 preprocess_log log(1 entries from seq 13 at 2016-03-08 15:19:02.851088) v1 from mon.0 2016-03-08 15:19:02.857782 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.857784 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.857785 7f82a5352700 20 allow all 2016-03-08 15:19:02.857794 7f82a5352700 10 mon.a@0(leader).log v6 prepare_update log(1 entries from seq 13 at 2016-03-08 15:19:02.851088) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.857806 7f82a5352700 10 mon.a@0(leader).log v6 prepare_log log(1 entries from seq 13 at 2016-03-08 15:19:02.851088) v1 from mon.0 2016-03-08 15:19:02.857810 7f82a5352700 10 mon.a@0(leader).log v6 logging 2016-03-08 15:19:02.851088 mon.0 127.0.0.1:7104/0 13 : audit [INF] from='client.? 
127.0.0.1:0/2094244407' entity='client.admin' cmd='[{"prefix": "osd pool create", "pg_num": 4, "pool": "rbd"}]': finished 2016-03-08 15:19:02.857826 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..6) setting proposal_timer 0x559cd1494500 with delay of 0.0866203 2016-03-08 15:19:02.857839 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1562080 2016-03-08 15:19:02.857845 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 14 at 2016-03-08 15:19:02.857628) v1 ==== 0+0+0 (0 0 0) 0x559cd17caa00 con 0x559cd1477080 2016-03-08 15:19:02.857887 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.857892 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.857906 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..6) dispatch 0x559cd17caa00 log(1 entries from seq 14 at 2016-03-08 15:19:02.857628) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.857912 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 1..13) is_readable = 1 - now=2016-03-08 15:19:02.857912 lease_expire=0.000000 has v0 lc 13 2016-03-08 15:19:02.857929 7f82a5352700 10 mon.a@0(leader).log v6 preprocess_query log(1 entries from seq 14 at 2016-03-08 15:19:02.857628) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.857945 7f82a5352700 10 mon.a@0(leader).log v6 preprocess_log log(1 entries from seq 14 at 2016-03-08 15:19:02.857628) v1 from mon.0 2016-03-08 15:19:02.857964 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.857966 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.857967 7f82a5352700 20 allow all 2016-03-08 15:19:02.857975 7f82a5352700 10 mon.a@0(leader).log v6 prepare_update log(1 entries from seq 14 at 2016-03-08 15:19:02.857628) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.857986 7f82a5352700 10 mon.a@0(leader).log v6 prepare_log log(1 entries from seq 14 at 2016-03-08 15:19:02.857628) v1 from mon.0 2016-03-08 15:19:02.857991 7f82a5352700 10 mon.a@0(leader).log v6 logging 2016-03-08 15:19:02.857628 mon.0 127.0.0.1:7104/0 14 : cluster [INF] osdmap e3: 0 osds: 0 up, 0 in 2016-03-08 15:19:02.858004 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..6) proposal_timer already set 2016-03-08 15:19:02.858008 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd17caa00 2016-03-08 15:19:02.868508 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..13) commit_finish 14 2016-03-08 15:19:02.868530 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:02.868542 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..4) refresh 2016-03-08 15:19:02.868543 7f82a6b55700 10 mon.a@0(leader).pg v3 update_from_paxos read_incremental 2016-03-08 15:19:02.868554 7f82a6b55700 20 mon.a@0(leader).pg v3 refreshing pg 1.0 got 0 len 533 2016-03-08 15:19:02.868567 7f82a6b55700 20 mon.a@0(leader).pg v3 refreshing pg 1.1 got 0 len 533 2016-03-08 15:19:02.868572 7f82a6b55700 20 mon.a@0(leader).pg v3 refreshing pg 1.2 got 0 len 533 2016-03-08 15:19:02.868576 7f82a6b55700 20 mon.a@0(leader).pg v3 refreshing pg 1.3 got 0 len 533 2016-03-08 15:19:02.868585 7f82a6b55700 10 mon.a@0(leader).pg v4 read_pgmap_meta 2016-03-08 15:19:02.868602 7f82a6b55700 10 mon.a@0(leader).pg v4 update_logger 2016-03-08 15:19:02.868611 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:02.868618 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..3) refresh 2016-03-08 
15:19:02.868625 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..6) refresh 2016-03-08 15:19:02.868627 7f82a6b55700 10 mon.a@0(leader).log v6 update_from_paxos 2016-03-08 15:19:02.868628 7f82a6b55700 10 mon.a@0(leader).log v6 update_from_paxos version 6 summary v 6 2016-03-08 15:19:02.868636 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:02.868642 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) refresh 2016-03-08 15:19:02.868643 7f82a6b55700 10 mon.a@0(leader).auth v2 update_from_paxos 2016-03-08 15:19:02.868644 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..4) post_refresh 2016-03-08 15:19:02.868645 7f82a6b55700 10 mon.a@0(leader).pg v4 post_paxos_update 2016-03-08 15:19:02.868646 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:02.868648 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..3) post_refresh 2016-03-08 15:19:02.868649 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..6) post_refresh 2016-03-08 15:19:02.868649 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:02.868650 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) post_refresh 2016-03-08 15:19:02.868651 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..14) commit_proposal 2016-03-08 15:19:02.868653 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..4) _active - not active 2016-03-08 15:19:02.868654 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..14) finish_round 2016-03-08 15:19:02.868655 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..14) finish_round waiting_for_acting 2016-03-08 15:19:02.868656 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..4) _active 2016-03-08 15:19:02.868657 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..4) remove_legacy_versions 2016-03-08 15:19:02.868660 7f82a6b55700 7 mon.a@0(leader).paxosservice(pgmap 1..4) _active creating new pending 2016-03-08 15:19:02.868662 7f82a6b55700 10 mon.a@0(leader).pg v4 create_pending v 5 2016-03-08 15:19:02.868664 7f82a6b55700 10 mon.a@0(leader).pg v4 check_osd_map already seen 3 >= 3 2016-03-08 15:19:02.868664 7f82a6b55700 10 mon.a@0(leader).pg v4 update_logger 2016-03-08 15:19:02.868672 7f82a6b55700 0 log_channel(cluster) log [INF] : pgmap v4: 4 pgs: 4 creating; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:02.868685 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 15 at 2016-03-08 15:19:02.868673) v1 -- ?+0 0x559cd1561e00 con 0x559cd1477080 2016-03-08 15:19:02.868689 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 15 at 2016-03-08 15:19:02.868673) v1 local 2016-03-08 15:19:02.868695 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..14) finish_round waiting_for_readable 2016-03-08 15:19:02.868696 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..14) finish_round waiting_for_writeable 2016-03-08 15:19:02.868697 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..14) finish_round done w/ waiters, state 1 2016-03-08 15:19:02.868724 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 15 at 2016-03-08 15:19:02.868673) v1 ==== 0+0+0 (0 0 0) 0x559cd1561e00 con 0x559cd1477080 2016-03-08 15:19:02.868769 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.868774 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.868792 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..6) 
dispatch 0x559cd1561e00 log(1 entries from seq 15 at 2016-03-08 15:19:02.868673) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:02.868800 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..14) is_readable = 1 - now=2016-03-08 15:19:02.868800 lease_expire=0.000000 has v0 lc 14 2016-03-08 15:19:02.868815 7f82a5352700 10 mon.a@0(leader).log v6 preprocess_query log(1 entries from seq 15 at 2016-03-08 15:19:02.868673) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.868830 7f82a5352700 10 mon.a@0(leader).log v6 preprocess_log log(1 entries from seq 15 at 2016-03-08 15:19:02.868673) v1 from mon.0 2016-03-08 15:19:02.868836 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:02.868837 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.868838 7f82a5352700 20 allow all 2016-03-08 15:19:02.868852 7f82a5352700 10 mon.a@0(leader).log v6 prepare_update log(1 entries from seq 15 at 2016-03-08 15:19:02.868673) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.868863 7f82a5352700 10 mon.a@0(leader).log v6 prepare_log log(1 entries from seq 15 at 2016-03-08 15:19:02.868673) v1 from mon.0 2016-03-08 15:19:02.868869 7f82a5352700 10 mon.a@0(leader).log v6 logging 2016-03-08 15:19:02.868673 mon.0 127.0.0.1:7104/0 15 : cluster [INF] pgmap v4: 4 pgs: 4 creating; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:02.868885 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..6) proposal_timer already set 2016-03-08 15:19:02.868888 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1561e00 2016-03-08 15:19:02.901211 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).reader couldn't read tag, (0) Success 2016-03-08 15:19:02.901240 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).fault (0) Success 2016-03-08 15:19:02.901284 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).fault on lossy channel, failing 2016-03-08 15:19:02.901291 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479480).stop 2016-03-08 15:19:02.901297 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1479480).unregister_pipe 2016-03-08 15:19:02.901304 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1479480).discard_queue 2016-03-08 15:19:02.901314 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1479480).reader done 2016-03-08 15:19:02.901318 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1479480).writer finishing 2016-03-08 15:19:02.901321 7f82a5352700 10 mon.a@0(leader) e1 ms_handle_reset 0x559cd1479480 127.0.0.1:0/2094244407 2016-03-08 15:19:02.901334 7f82a334e700 10 -- 127.0.0.1:7104/0 queue_reap 0x559cd17ab400 2016-03-08 15:19:02.901334 7f82a5352700 10 mon.a@0(leader) e1 reset/close on session client.? 127.0.0.1:0/2094244407 2016-03-08 15:19:02.901339 7f82a5352700 10 mon.a@0(leader) e1 remove_session 0x559cd14d7200 client.? 
127.0.0.1:0/2094244407 2016-03-08 15:19:02.901340 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1479480).writer done 2016-03-08 15:19:02.901346 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper 2016-03-08 15:19:02.901361 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper reaping pipe 0x559cd17ab400 127.0.0.1:0/2094244407 2016-03-08 15:19:02.901364 7f82a6354700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1479480).discard_queue 2016-03-08 15:19:02.901367 7f82a6354700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1479480).unregister_pipe - not registered 2016-03-08 15:19:02.901370 7f82a6354700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2094244407 pipe(0x559cd17ab400 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1479480).join 2016-03-08 15:19:02.901382 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper reaped pipe 0x559cd17ab400 127.0.0.1:0/2094244407 2016-03-08 15:19:02.901392 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper deleted pipe 0x559cd17ab400 2016-03-08 15:19:02.901394 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper done 2016-03-08 15:19:02.944518 7f82a5b53700 10 mon.a@0(leader).paxosservice(logm 1..6) propose_pending 2016-03-08 15:19:02.944533 7f82a5b53700 10 mon.a@0(leader).log v6 encode_full log v 6 2016-03-08 15:19:02.944564 7f82a5b53700 10 mon.a@0(leader).log v6 encode_pending v7 2016-03-08 15:19:02.944570 7f82a5b53700 5 mon.a@0(leader).paxos(paxos active c 1..14) queue_pending_finisher 0x559cd14944b0 2016-03-08 15:19:02.944572 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..14) trigger_propose active, proposing now 2016-03-08 15:19:02.944577 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..14) propose_pending 15 4294 bytes 2016-03-08 15:19:02.944578 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..14) begin for 15 4294 bytes 2016-03-08 15:19:02.952934 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..14) commit_start 15 2016-03-08 15:19:02.959095 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..14) commit_finish 15 2016-03-08 15:19:02.959137 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:02.959152 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..4) refresh 2016-03-08 15:19:02.959160 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:02.959166 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..3) refresh 2016-03-08 15:19:02.959173 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..7) refresh 2016-03-08 15:19:02.959174 7f82a6b55700 10 mon.a@0(leader).log v7 update_from_paxos 2016-03-08 15:19:02.959175 7f82a6b55700 10 mon.a@0(leader).log v7 update_from_paxos version 7 summary v 6 2016-03-08 15:19:02.959179 7f82a6b55700 10 mon.a@0(leader).log v7 update_from_paxos latest full 6 2016-03-08 15:19:02.959185 7f82a6b55700 7 mon.a@0(leader).log v7 update_from_paxos applying incremental log 7 2016-03-08 15:19:02.851088 mon.0 127.0.0.1:7104/0 13 : audit [INF] from='client.? 
127.0.0.1:0/2094244407' entity='client.admin' cmd='[{"prefix": "osd pool create", "pg_num": 4, "pool": "rbd"}]': finished 2016-03-08 15:19:02.959198 7f82a6b55700 20 mon.a@0(leader).log v7 update_from_paxos logging for channel 'audit' to file 'testdir/osd-crush/log' 2016-03-08 15:19:02.959205 7f82a6b55700 7 mon.a@0(leader).log v7 update_from_paxos applying incremental log 7 2016-03-08 15:19:02.857628 mon.0 127.0.0.1:7104/0 14 : cluster [INF] osdmap e3: 0 osds: 0 up, 0 in 2016-03-08 15:19:02.959209 7f82a6b55700 20 mon.a@0(leader).log v7 update_from_paxos logging for channel 'cluster' to file 'testdir/osd-crush/log' 2016-03-08 15:19:02.959213 7f82a6b55700 7 mon.a@0(leader).log v7 update_from_paxos applying incremental log 7 2016-03-08 15:19:02.868673 mon.0 127.0.0.1:7104/0 15 : cluster [INF] pgmap v4: 4 pgs: 4 creating; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:02.959215 7f82a6b55700 20 mon.a@0(leader).log v7 update_from_paxos logging for channel 'cluster' to file 'testdir/osd-crush/log' 2016-03-08 15:19:02.959221 7f82a6b55700 15 mon.a@0(leader).log v7 update_from_paxos logging for 2 channels 2016-03-08 15:19:02.959222 7f82a6b55700 15 mon.a@0(leader).log v7 update_from_paxos channel 'audit' logging 204 bytes 2016-03-08 15:19:02.959240 7f82a6b55700 15 mon.a@0(leader).log v7 update_from_paxos channel 'cluster' logging 240 bytes 2016-03-08 15:19:02.959246 7f82a6b55700 10 mon.a@0(leader).log v7 check_subs 2016-03-08 15:19:02.959257 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:02.959264 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) refresh 2016-03-08 15:19:02.959265 7f82a6b55700 10 mon.a@0(leader).auth v2 update_from_paxos 2016-03-08 15:19:02.959267 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..4) post_refresh 2016-03-08 15:19:02.959268 7f82a6b55700 10 mon.a@0(leader).pg v4 post_paxos_update 2016-03-08 15:19:02.959269 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:02.959269 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..3) post_refresh 2016-03-08 15:19:02.959270 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..7) post_refresh 2016-03-08 15:19:02.959272 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:02.959272 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) post_refresh 2016-03-08 15:19:02.959273 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..15) commit_proposal 2016-03-08 15:19:02.959275 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..7) _active - not active 2016-03-08 15:19:02.959277 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..15) finish_round 2016-03-08 15:19:02.959278 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..15) finish_round waiting_for_acting 2016-03-08 15:19:02.959279 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..7) _active 2016-03-08 15:19:02.959280 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..7) remove_legacy_versions 2016-03-08 15:19:02.959283 7f82a6b55700 7 mon.a@0(leader).paxosservice(logm 1..7) _active creating new pending 2016-03-08 15:19:02.959290 7f82a6b55700 10 mon.a@0(leader).log v7 create_pending v 8 2016-03-08 15:19:02.959297 7f82a6b55700 7 mon.a@0(leader).log v7 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.959310 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd18067e0 0x559cd15561c0 log(last 13) v1 2016-03-08 15:19:02.959312 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 13) v1 -- ?+0 0x559cd15561c0 con 
0x559cd1477080 2016-03-08 15:19:02.959315 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 13) v1 local 2016-03-08 15:19:02.959340 7f82a6b55700 7 mon.a@0(leader).log v7 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.959345 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd1806fc0 0x559cd15558c0 log(last 14) v1 2016-03-08 15:19:02.959346 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 14) v1 -- ?+0 0x559cd15558c0 con 0x559cd1477080 2016-03-08 15:19:02.959348 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 14) v1 local 2016-03-08 15:19:02.959348 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 13) v1 ==== 0+0+0 (0 0 0) 0x559cd15561c0 con 0x559cd1477080 2016-03-08 15:19:02.959362 7f82a6b55700 7 mon.a@0(leader).log v7 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.959365 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd1808520 0x559cd1555680 log(last 15) v1 2016-03-08 15:19:02.959366 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 15) v1 -- ?+0 0x559cd1555680 con 0x559cd1477080 2016-03-08 15:19:02.959368 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 15) v1 local 2016-03-08 15:19:02.959379 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..15) finish_round waiting_for_readable 2016-03-08 15:19:02.959381 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..15) finish_round waiting_for_writeable 2016-03-08 15:19:02.959381 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..15) finish_round done w/ waiters, state 1 2016-03-08 15:19:02.959424 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.959429 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.959436 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:02.959437 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.959438 7f82a5352700 20 allow all 2016-03-08 15:19:02.959449 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd15561c0 2016-03-08 15:19:02.959451 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 14) v1 ==== 0+0+0 (0 0 0) 0x559cd15558c0 con 0x559cd1477080 2016-03-08 15:19:02.959461 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.959463 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.959465 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:02.959467 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.959467 7f82a5352700 20 allow all 2016-03-08 15:19:02.959472 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd15558c0 2016-03-08 15:19:02.959475 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 15) v1 ==== 0+0+0 (0 0 0) 0x559cd1555680 con 0x559cd1477080 2016-03-08 15:19:02.959483 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:02.959484 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:02.959487 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:02.959488 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:02.959488 7f82a5352700 20 allow all 2016-03-08 15:19:02.959493 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1555680 2016-03-08 15:19:03.304178 
7f82a4350700 20 accepter.accepter poll got 1 2016-03-08 15:19:03.304190 7f82a4350700 10 accepter.pfd.revents=1 2016-03-08 15:19:03.304196 7f82a4350700 10 accepter.accepted incoming on sd 21 2016-03-08 15:19:03.304222 7f82a4350700 20 accepter.accepter calling poll 2016-03-08 15:19:03.304232 7f82aea26700 10 -- 127.0.0.1:7104/0 >> :/0 pipe(0x559cd17ac800 sd=21 :0 s=0 pgs=0 cs=0 l=0 c=0x559cd1479300).accept 2016-03-08 15:19:03.304282 7f82aea26700 1 -- 127.0.0.1:7104/0 >> :/0 pipe(0x559cd17ac800 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd1479300).accept sd=21 127.0.0.1:52222/0 2016-03-08 15:19:03.304349 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd1479300).accept peer addr is 127.0.0.1:0/3061411355 2016-03-08 15:19:03.304356 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd1479300).accept got peer connect_seq 0 global_seq 1 2016-03-08 15:19:03.304363 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1479300).accept of host_type 8, policy.lossy=1 policy.server=1 policy.standby=0 policy.resetcheck=0 2016-03-08 15:19:03.304366 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1479300).accept my proto 15, their proto 15 2016-03-08 15:19:03.304369 7f82aea26700 10 mon.a@0(leader) e1 ms_verify_authorizer 127.0.0.1:0/3061411355 client protocol 0 2016-03-08 15:19:03.304373 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1479300).accept: setting up session_security. 2016-03-08 15:19:03.304376 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1479300).accept new session 2016-03-08 15:19:03.304378 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).accept success, connect_seq = 1, sending READY 2016-03-08 15:19:03.304381 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).accept features 576460752303423487 2016-03-08 15:19:03.304388 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).register_pipe 2016-03-08 15:19:03.304402 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).discard_requeued_up_to 0 2016-03-08 15:19:03.304406 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).accept starting writer, state open 2016-03-08 15:19:03.304424 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).accept done 2016-03-08 15:19:03.304430 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader reading tag... 
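The accepter/pipe lines above show a fresh client TCP connection being accepted and taken through the messenger handshake (peer address, connect_seq/global_seq, feature bits) before the first Ceph-level message, the auth request below, is read; every short-lived ceph CLI call produces one such accept/handshake/teardown cycle, and these lines only appear because the messenger subsystem is logging at level 20. On a running monitor that verbosity can be toggled without a restart, for example (assuming the monitor answers to the id 'a' as in this trace):

  # raise or lower messenger debugging on the running monitor
  ceph -m 127.0.0.1:7104 tell mon.a injectargs '--debug-ms 20'
  ceph -m 127.0.0.1:7104 tell mon.a injectargs '--debug-ms 1'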
2016-03-08 15:19:03.304445 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.304457 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer sleeping 2016-03-08 15:19:03.304467 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got KEEPALIVE2 2016-03-08 15:19:03.304447 2016-03-08 15:19:03.304477 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader reading tag... 2016-03-08 15:19:03.304495 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got MSG 2016-03-08 15:19:03.304496 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.304501 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got envelope type=17 src client.? front=68 data=0 off 0 2016-03-08 15:19:03.304504 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader wants 68 bytes from policy throttler 0/104857600 2016-03-08 15:19:03.304508 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader wants 68 from dispatch throttler 0/104857600 2016-03-08 15:19:03.304504 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).write_keepalive2 15 2016-03-08 15:19:03.304447 2016-03-08 15:19:03.304513 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got front 68 2016-03-08 15:19:03.304515 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).aborted = 0 2016-03-08 15:19:03.304518 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got 68 + 0 + 0 byte message 2016-03-08 15:19:03.304526 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got message 1 0x559cd1562580 auth(proto 0 38 bytes epoch 0) v1 2016-03-08 15:19:03.304531 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1562580 prio 127 2016-03-08 15:19:03.304538 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader reading tag... 2016-03-08 15:19:03.304541 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).write_ack 1 2016-03-08 15:19:03.304546 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.? 
127.0.0.1:0/3061411355 1 ==== auth(proto 0 38 bytes epoch 0) v1 ==== 68+0+0 (424667196 0 0) 0x559cd1562580 con 0x559cd1479300 2016-03-08 15:19:03.304553 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.304557 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer sleeping 2016-03-08 15:19:03.304580 7f82a5352700 10 mon.a@0(leader) e1 _ms_dispatch new session 0x559cd14d7200 MonSession(client.? 127.0.0.1:0/3061411355 is open) 2016-03-08 15:19:03.304585 7f82a5352700 20 mon.a@0(leader) e1 caps 2016-03-08 15:19:03.304591 7f82a5352700 10 mon.a@0(leader).paxosservice(auth 1..2) dispatch 0x559cd1562580 auth(proto 0 38 bytes epoch 0) v1 from client.? 127.0.0.1:0/3061411355 con 0x559cd1479300 2016-03-08 15:19:03.304596 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..15) is_readable = 1 - now=2016-03-08 15:19:03.304596 lease_expire=0.000000 has v0 lc 15 2016-03-08 15:19:03.304601 7f82a5352700 10 mon.a@0(leader).auth v2 preprocess_query auth(proto 0 38 bytes epoch 0) v1 from client.? 127.0.0.1:0/3061411355 2016-03-08 15:19:03.304604 7f82a5352700 10 mon.a@0(leader).auth v2 prep_auth() blob_size=38 2016-03-08 15:19:03.304611 7f82a5352700 10 mon.a@0(leader).auth v2 AuthMonitor::assign_global_id m=auth(proto 0 38 bytes epoch 0) v1 mon=0/1 last_allocated=4098 max_global_id=14096 2016-03-08 15:19:03.304614 7f82a5352700 10 mon.a@0(leader).auth v2 next_global_id should be 4099 2016-03-08 15:19:03.304618 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/3061411355 -- mon_map magic: 0 v1 -- ?+0 0x559cd1555680 con 0x559cd1479300 2016-03-08 15:19:03.304623 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_map magic: 0 v1 remote, 127.0.0.1:0/3061411355, have pipe. 2016-03-08 15:19:03.304631 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.304642 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer encoding 1 features 576460752303423487 0x559cd1555680 mon_map magic: 0 v1 2016-03-08 15:19:03.304644 7f82a5352700 2 mon.a@0(leader) e1 send_reply 0x559cd1808ac0 0x559cd1563c00 auth_reply(proto 1 0 (0) Success) v1 2016-03-08 15:19:03.304651 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/3061411355 -- auth_reply(proto 1 0 (0) Success) v1 -- ?+0 0x559cd1563c00 con 0x559cd1479300 2016-03-08 15:19:03.304652 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer signed seq # 1): sig = 0 2016-03-08 15:19:03.304663 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer sending 1 0x559cd1555680 2016-03-08 15:19:03.304665 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message auth_reply(proto 1 0 (0) Success) v1 remote, 127.0.0.1:0/3061411355, have pipe. 
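The auth exchange above uses protocol 0 in the request (the client has not picked a protocol yet) and protocol 1, i.e. 'none', in the reply, and the AuthMonitor assigns the next global_id (4099); the same session is addressed as client.4099 in the following messages. Under the assumption that cephx is indeed disabled for this cluster, any client can open such a session without a keyring, for example:

  # no keyring is needed when the monitor answers auth with proto 'none'
  ceph -m 127.0.0.1:7104 health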
2016-03-08 15:19:03.304687 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 68 to dispatch throttler 68/104857600 2016-03-08 15:19:03.304688 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.304693 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1562580 2016-03-08 15:19:03.304694 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer encoding 2 features 576460752303423487 0x559cd1563c00 auth_reply(proto 1 0 (0) Success) v1 2016-03-08 15:19:03.304705 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer signed seq # 2): sig = 0 2016-03-08 15:19:03.304708 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer sending 2 0x559cd1563c00 2016-03-08 15:19:03.304721 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.304725 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer sleeping 2016-03-08 15:19:03.304833 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got ACK 2016-03-08 15:19:03.304839 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got ack seq 1 2016-03-08 15:19:03.304842 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader reading tag... 2016-03-08 15:19:03.304844 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got ACK 2016-03-08 15:19:03.304846 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got ack seq 2 2016-03-08 15:19:03.304848 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader reading tag... 2016-03-08 15:19:03.304850 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got MSG 2016-03-08 15:19:03.304852 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got envelope type=15 src client.? 
front=23 data=0 off 0 2016-03-08 15:19:03.304855 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader wants 23 bytes from policy throttler 0/104857600 2016-03-08 15:19:03.304857 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader wants 23 from dispatch throttler 0/104857600 2016-03-08 15:19:03.304861 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got front 23 2016-03-08 15:19:03.304864 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).aborted = 0 2016-03-08 15:19:03.304866 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got 23 + 0 + 0 byte message 2016-03-08 15:19:03.304874 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got message 2 0x559cd1857000 mon_subscribe({monmap=0+}) v2 2016-03-08 15:19:03.304879 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1857000 prio 127 2016-03-08 15:19:03.304885 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader reading tag... 2016-03-08 15:19:03.304889 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.? 127.0.0.1:0/3061411355 2 ==== mon_subscribe({monmap=0+}) v2 ==== 23+0+0 (1620593354 0 0) 0x559cd1857000 con 0x559cd1479300 2016-03-08 15:19:03.304903 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got MSG 2016-03-08 15:19:03.304898 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.304907 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got envelope type=15 src client.4099 front=23 data=0 off 0 2016-03-08 15:19:03.304910 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader wants 23 bytes from policy throttler 23/104857600 2016-03-08 15:19:03.304910 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).write_ack 2 2016-03-08 15:19:03.304913 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader wants 23 from dispatch throttler 23/104857600 2016-03-08 15:19:03.304917 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got front 23 2016-03-08 15:19:03.304919 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.304920 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d7200 for client.? 
127.0.0.1:0/3061411355 2016-03-08 15:19:03.304925 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).aborted = 0 2016-03-08 15:19:03.304923 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer sleeping 2016-03-08 15:19:03.304927 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got 23 + 0 + 0 byte message 2016-03-08 15:19:03.304931 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:03.304933 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got message 3 0x559cd1857200 mon_subscribe({osdmap=0}) v2 2016-03-08 15:19:03.304935 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:03.304936 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:03.304936 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1857200 prio 127 2016-03-08 15:19:03.304938 7f82a5352700 20 allow all 2016-03-08 15:19:03.304939 7f82a5352700 10 mon.a@0(leader) e1 handle_subscribe mon_subscribe({monmap=0+}) v2 2016-03-08 15:19:03.304940 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader reading tag... 2016-03-08 15:19:03.304944 7f82a5352700 10 mon.a@0(leader) e1 check_sub monmap next 0 have 1 2016-03-08 15:19:03.304942 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.304947 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).write_ack 3 2016-03-08 15:19:03.304963 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/3061411355 -- mon_map magic: 0 v1 -- ?+0 0x559cd15558c0 con 0x559cd1479300 2016-03-08 15:19:03.304965 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.304969 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer sleeping 2016-03-08 15:19:03.304978 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_map magic: 0 v1 remote, 127.0.0.1:0/3061411355, have pipe. 
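At this point the mon has authenticated the client (auth_reply proto 1, i.e. auth none) and answered its mon_subscribe({monmap=0+}) with a mon_map: the standard handshake every MonClient performs before it can issue a command. To see the client-side half of the same exchange, any read-only command can be run with messenger and monclient debugging raised; the flags are real options, but the exact invocation and file name below are only an illustration, not part of this run.

    # Sketch: capture the client's view of the connect/auth/subscribe handshake.
    ceph -m 127.0.0.1:7104 --debug-ms 20 --debug-monc 20 --log-to-stderr \
        osd stat 2> client-handshake.log
    # The client log should show the mirror image of the mon-side trace above.
    grep -E 'auth_reply|mon_subscribe' client-handshake.log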
2016-03-08 15:19:03.304990 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.304997 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer encoding 3 features 576460752303423487 0x559cd15558c0 mon_map magic: 0 v1 2016-03-08 15:19:03.305002 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer signed seq # 3): sig = 0 2016-03-08 15:19:03.305008 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer sending 3 0x559cd15558c0 2016-03-08 15:19:03.305024 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 23 to dispatch throttler 46/104857600 2016-03-08 15:19:03.305029 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1857000 2016-03-08 15:19:03.305032 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.305035 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer sleeping 2016-03-08 15:19:03.305034 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4099 127.0.0.1:0/3061411355 3 ==== mon_subscribe({osdmap=0}) v2 ==== 23+0+0 (319581110 0 0) 0x559cd1857200 con 0x559cd1479300 2016-03-08 15:19:03.305063 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d7200 for client.? 127.0.0.1:0/3061411355 2016-03-08 15:19:03.305066 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:03.305069 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:03.305070 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:03.305070 7f82a5352700 20 allow all 2016-03-08 15:19:03.305071 7f82a5352700 10 mon.a@0(leader) e1 handle_subscribe mon_subscribe({osdmap=0}) v2 2016-03-08 15:19:03.305074 7f82a5352700 20 is_capable service=osd command= read on cap allow * 2016-03-08 15:19:03.305074 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:03.305075 7f82a5352700 20 allow all 2016-03-08 15:19:03.305076 7f82a5352700 10 mon.a@0(leader).osd e3 check_sub 0x559cd188a4e0 next 0 (onetime) 2016-03-08 15:19:03.305080 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/3061411355 -- osd_map(3..3 src has 1..3) v3 -- ?+0 0x559cd1561900 con 0x559cd1479300 2016-03-08 15:19:03.305084 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message osd_map(3..3 src has 1..3) v3 remote, 127.0.0.1:0/3061411355, have pipe. 
2016-03-08 15:19:03.305092 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 23 to dispatch throttler 23/104857600 2016-03-08 15:19:03.305094 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1857200 2016-03-08 15:19:03.305108 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.305120 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer encoding 4 features 576460752303423487 0x559cd1561900 osd_map(3..3 src has 1..3) v3 2016-03-08 15:19:03.305129 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer signed seq # 4): sig = 0 2016-03-08 15:19:03.305133 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer sending 4 0x559cd1561900 2016-03-08 15:19:03.305158 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.305163 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer sleeping 2016-03-08 15:19:03.306595 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got ACK 2016-03-08 15:19:03.306605 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got ack seq 3 2016-03-08 15:19:03.306608 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader reading tag... 2016-03-08 15:19:03.306610 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got ACK 2016-03-08 15:19:03.306612 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got ack seq 4 2016-03-08 15:19:03.306614 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader reading tag... 
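The client's mon_subscribe({osdmap=0}) was a one-time subscription, so the mon ships exactly one map, osd_map(3..3 src has 1..3): epoch 3, with the mon noting it holds epochs 1..3. If that epoch needs to be inspected after the fact, a sketch of how to fetch and decode it with the stock tools (not something this test run does) is:

    # Sketch: dump the osdmap epoch that was just sent to the client.
    ceph -m 127.0.0.1:7104 osd getmap 3 -o /tmp/osdmap.3
    osdmaptool --print /tmp/osdmap.3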
2016-03-08 15:19:03.306616 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got MSG 2016-03-08 15:19:03.306619 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got envelope type=50 src client.4099 front=80 data=0 off 0 2016-03-08 15:19:03.306622 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader wants 80 bytes from policy throttler 0/104857600 2016-03-08 15:19:03.306625 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader wants 80 from dispatch throttler 0/104857600 2016-03-08 15:19:03.306628 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got front 80 2016-03-08 15:19:03.306630 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).aborted = 0 2016-03-08 15:19:03.306632 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got 80 + 0 + 0 byte message 2016-03-08 15:19:03.306640 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got message 4 0x559cd1555680 mon_command({"prefix": "get_command_descriptions"} v 0) v1 2016-03-08 15:19:03.306643 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1555680 prio 127 2016-03-08 15:19:03.306648 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader reading tag... 2016-03-08 15:19:03.306654 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4099 127.0.0.1:0/3061411355 4 ==== mon_command({"prefix": "get_command_descriptions"} v 0) v1 ==== 80+0+0 (3134233619 0 0) 0x559cd1555680 con 0x559cd1479300 2016-03-08 15:19:03.306658 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.306671 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).write_ack 4 2016-03-08 15:19:03.306678 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.306680 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer sleeping 2016-03-08 15:19:03.306694 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d7200 for client.? 
127.0.0.1:0/3061411355 2016-03-08 15:19:03.306700 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:03.308253 7f82a5352700 2 mon.a@0(leader) e1 send_reply 0x559cd18094e0 0x559cd1555d40 mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 2016-03-08 15:19:03.308257 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/3061411355 -- mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 -- ?+37345 0x559cd1555d40 con 0x559cd1479300 2016-03-08 15:19:03.308262 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 remote, 127.0.0.1:0/3061411355, have pipe. 2016-03-08 15:19:03.308276 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 80 to dispatch throttler 80/104857600 2016-03-08 15:19:03.308279 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1555680 2016-03-08 15:19:03.308282 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.308293 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer encoding 5 features 576460752303423487 0x559cd1555d40 mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 2016-03-08 15:19:03.308304 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer signed seq # 5): sig = 0 2016-03-08 15:19:03.308307 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer sending 5 0x559cd1555d40 2016-03-08 15:19:03.308350 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.308353 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer sleeping 2016-03-08 15:19:03.355948 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got ACK 2016-03-08 15:19:03.355970 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got ack seq 5 2016-03-08 15:19:03.355973 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader reading tag... 
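The mon_command({"prefix": "get_command_descriptions"}) exchange above is the first thing every ceph CLI invocation does: it pulls down the command table (the roughly 37 kB mon_command_ack payload, 37345 bytes here) before sending the real command. The next message in the trace is mon_command({"prefix": "osd create", "uuid": "ad2b7a59-c78c-449e-86c6-e537c3b12550"}), which is consistent with a client-side call along these lines (a sketch; the wrapper the test actually uses is not shown here):

    # Sketch of the client-side call matching the mon_command seen below.
    # Passing an explicit uuid makes "osd create" idempotent: repeating it
    # with the same uuid returns the same osd id instead of allocating another.
    ceph -m 127.0.0.1:7104 osd create ad2b7a59-c78c-449e-86c6-e537c3b12550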
2016-03-08 15:19:03.355975 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got MSG 2016-03-08 15:19:03.355977 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got envelope type=50 src client.4099 front=114 data=0 off 0 2016-03-08 15:19:03.355981 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader wants 114 bytes from policy throttler 0/104857600 2016-03-08 15:19:03.355987 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader wants 114 from dispatch throttler 0/104857600 2016-03-08 15:19:03.355991 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got front 114 2016-03-08 15:19:03.355994 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).aborted = 0 2016-03-08 15:19:03.355996 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got 114 + 0 + 0 byte message 2016-03-08 15:19:03.356006 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader got message 5 0x559cd15558c0 mon_command({"prefix": "osd create", "uuid": "ad2b7a59-c78c-449e-86c6-e537c3b12550"} v 0) v1 2016-03-08 15:19:03.356020 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd15558c0 prio 127 2016-03-08 15:19:03.356025 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader reading tag... 2016-03-08 15:19:03.356027 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.356035 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).write_ack 5 2016-03-08 15:19:03.356042 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.356044 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer sleeping 2016-03-08 15:19:03.356048 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4099 127.0.0.1:0/3061411355 5 ==== mon_command({"prefix": "osd create", "uuid": "ad2b7a59-c78c-449e-86c6-e537c3b12550"} v 0) v1 ==== 114+0+0 (3167933768 0 0) 0x559cd15558c0 con 0x559cd1479300 2016-03-08 15:19:03.356094 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d7200 for client.? 
127.0.0.1:0/3061411355 2016-03-08 15:19:03.356097 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:03.356123 7f82a5352700 0 mon.a@0(leader) e1 handle_command mon_command({"prefix": "osd create", "uuid": "ad2b7a59-c78c-449e-86c6-e537c3b12550"} v 0) v1 2016-03-08 15:19:03.356139 7f82a5352700 20 is_capable service=osd command=osd create read write on cap allow * 2016-03-08 15:19:03.356141 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:03.356142 7f82a5352700 20 allow all 2016-03-08 15:19:03.356142 7f82a5352700 10 mon.a@0(leader) e1 _allowed_command capable 2016-03-08 15:19:03.356147 7f82a5352700 0 log_channel(audit) log [INF] : from='client.? 127.0.0.1:0/3061411355' entity='client.bootstrap-osd' cmd=[{"prefix": "osd create", "uuid": "ad2b7a59-c78c-449e-86c6-e537c3b12550"}]: dispatch 2016-03-08 15:19:03.356157 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 16 at 2016-03-08 15:19:03.356149) v1 -- ?+0 0x559cd1563480 con 0x559cd1477080 2016-03-08 15:19:03.356162 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 16 at 2016-03-08 15:19:03.356149) v1 local 2016-03-08 15:19:03.356185 7f82a5352700 10 mon.a@0(leader).paxosservice(osdmap 1..3) dispatch 0x559cd15558c0 mon_command({"prefix": "osd create", "uuid": "ad2b7a59-c78c-449e-86c6-e537c3b12550"} v 0) v1 from client.4099 127.0.0.1:0/3061411355 con 0x559cd1479300 2016-03-08 15:19:03.356190 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..15) is_readable = 1 - now=2016-03-08 15:19:03.356190 lease_expire=0.000000 has v0 lc 15 2016-03-08 15:19:03.356195 7f82a5352700 10 mon.a@0(leader).osd e3 preprocess_query mon_command({"prefix": "osd create", "uuid": "ad2b7a59-c78c-449e-86c6-e537c3b12550"} v 0) v1 from client.4099 127.0.0.1:0/3061411355 2016-03-08 15:19:03.356218 7f82a5352700 7 mon.a@0(leader).osd e3 prepare_update mon_command({"prefix": "osd create", "uuid": "ad2b7a59-c78c-449e-86c6-e537c3b12550"} v 0) v1 from client.4099 127.0.0.1:0/3061411355 2016-03-08 15:19:03.356237 7f82a5352700 10 mon.a@0(leader).osd e3 osd create got uuid ad2b7a59-c78c-449e-86c6-e537c3b12550 2016-03-08 15:19:03.356240 7f82a5352700 10 mon.a@0(leader).osd e3 creating osd.0 2016-03-08 15:19:03.356248 7f82a5352700 10 mon.a@0(leader).osd e3 should_propose 2016-03-08 15:19:03.356250 7f82a5352700 10 mon.a@0(leader).paxosservice(osdmap 1..3) setting proposal_timer 0x559cd1494510 with delay of 0.05 2016-03-08 15:19:03.356260 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 114 to dispatch throttler 114/104857600 2016-03-08 15:19:03.356263 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd15558c0 2016-03-08 15:19:03.356265 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 16 at 2016-03-08 15:19:03.356149) v1 ==== 0+0+0 (0 0 0) 0x559cd1563480 con 0x559cd1477080 2016-03-08 15:19:03.356290 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.356293 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:03.356303 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..7) dispatch 0x559cd1563480 log(1 entries from seq 16 at 2016-03-08 15:19:03.356149) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:03.356307 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..15) is_readable = 1 - now=2016-03-08 15:19:03.356307 lease_expire=0.000000 has v0 lc 15 2016-03-08 15:19:03.356313 7f82a5352700 10 mon.a@0(leader).log v7 
preprocess_query log(1 entries from seq 16 at 2016-03-08 15:19:03.356149) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.356318 7f82a5352700 10 mon.a@0(leader).log v7 preprocess_log log(1 entries from seq 16 at 2016-03-08 15:19:03.356149) v1 from mon.0 2016-03-08 15:19:03.356320 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:03.356321 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:03.356324 7f82a5352700 20 allow all 2016-03-08 15:19:03.356340 7f82a5352700 10 mon.a@0(leader).log v7 prepare_update log(1 entries from seq 16 at 2016-03-08 15:19:03.356149) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.356352 7f82a5352700 10 mon.a@0(leader).log v7 prepare_log log(1 entries from seq 16 at 2016-03-08 15:19:03.356149) v1 from mon.0 2016-03-08 15:19:03.356356 7f82a5352700 10 mon.a@0(leader).log v7 logging 2016-03-08 15:19:03.356149 mon.0 127.0.0.1:7104/0 16 : audit [INF] from='client.? 127.0.0.1:0/3061411355' entity='client.bootstrap-osd' cmd=[{"prefix": "osd create", "uuid": "ad2b7a59-c78c-449e-86c6-e537c3b12550"}]: dispatch 2016-03-08 15:19:03.356365 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..7) setting proposal_timer 0x559cd1494520 with delay of 0.05 2016-03-08 15:19:03.356370 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1563480 2016-03-08 15:19:03.406392 7f82a5b53700 10 mon.a@0(leader).paxosservice(osdmap 1..3) propose_pending 2016-03-08 15:19:03.406411 7f82a5b53700 10 mon.a@0(leader).osd e3 encode_pending e 4 2016-03-08 15:19:03.406414 7f82a5b53700 2 mon.a@0(leader).osd e3 osd.0 DNE 2016-03-08 15:19:03.406468 7f82a5b53700 20 mon.a@0(leader).osd e3 full_crc 474695405 inc_crc 3645455204 2016-03-08 15:19:03.406477 7f82a5b53700 5 mon.a@0(leader).paxos(paxos active c 1..15) queue_pending_finisher 0x559cd1494480 2016-03-08 15:19:03.406482 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..15) trigger_propose active, proposing now 2016-03-08 15:19:03.406498 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..15) propose_pending 16 1766 bytes 2016-03-08 15:19:03.406501 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..15) begin for 16 1766 bytes 2016-03-08 15:19:03.414743 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..15) commit_start 16 2016-03-08 15:19:03.414785 7f82a5b53700 10 mon.a@0(leader).paxosservice(logm 1..7) propose_pending 2016-03-08 15:19:03.414788 7f82a5b53700 10 mon.a@0(leader).log v7 encode_full log v 7 2016-03-08 15:19:03.414814 7f82a5b53700 10 mon.a@0(leader).log v7 encode_pending v8 2016-03-08 15:19:03.414820 7f82a5b53700 5 mon.a@0(leader).paxos(paxos writing c 1..15) queue_pending_finisher 0x559cd1494510 2016-03-08 15:19:03.414822 7f82a5b53700 10 mon.a@0(leader).paxos(paxos writing c 1..15) trigger_propose not active, will propose later 2016-03-08 15:19:03.426800 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..15) commit_finish 16 2016-03-08 15:19:03.426852 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:03.426868 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..4) refresh 2016-03-08 15:19:03.426893 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:03.426901 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..4) refresh 2016-03-08 15:19:03.426902 7f82a6b55700 15 mon.a@0(leader).osd e3 update_from_paxos paxos e 4, my e 3 2016-03-08 15:19:03.426915 7f82a6b55700 7 mon.a@0(leader).osd e3 update_from_paxos applying incremental 4 2016-03-08 15:19:03.426969 7f82a6b55700 1 
mon.a@0(leader).osd e4 e4: 1 osds: 0 up, 0 in 2016-03-08 15:19:03.438036 7f82a6b55700 5 mon.a@0(leader).paxos(paxos refresh c 1..16) is_readable = 0 - now=2016-03-08 15:19:03.438043 lease_expire=0.000000 has v0 lc 16 2016-03-08 15:19:03.438063 7f82a6b55700 10 mon.a@0(leader).pg v4 check_osd_map -- osdmap not readable, waiting 2016-03-08 15:19:03.438066 7f82a6b55700 10 mon.a@0(leader).osd e4 check_subs 2016-03-08 15:19:03.438069 7f82a6b55700 10 mon.a@0(leader).osd e4 share_map_with_random_osd no up osds, don't share with anyone 2016-03-08 15:19:03.438070 7f82a6b55700 10 mon.a@0(leader).osd e4 update_logger 2016-03-08 15:19:03.438107 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..7) refresh 2016-03-08 15:19:03.438109 7f82a6b55700 10 mon.a@0(leader).log v7 update_from_paxos 2016-03-08 15:19:03.438110 7f82a6b55700 10 mon.a@0(leader).log v7 update_from_paxos version 7 summary v 7 2016-03-08 15:19:03.438120 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:03.438127 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) refresh 2016-03-08 15:19:03.438128 7f82a6b55700 10 mon.a@0(leader).auth v2 update_from_paxos 2016-03-08 15:19:03.438130 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..4) post_refresh 2016-03-08 15:19:03.438131 7f82a6b55700 10 mon.a@0(leader).pg v4 post_paxos_update 2016-03-08 15:19:03.438132 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:03.438133 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..4) post_refresh 2016-03-08 15:19:03.438134 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..7) post_refresh 2016-03-08 15:19:03.438134 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:03.438135 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) post_refresh 2016-03-08 15:19:03.438137 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..16) commit_proposal 2016-03-08 15:19:03.438139 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..4) _active - not active 2016-03-08 15:19:03.438141 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..16) finish_round 2016-03-08 15:19:03.438142 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..16) finish_round waiting_for_acting 2016-03-08 15:19:03.438143 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..4) _active 2016-03-08 15:19:03.438144 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..4) remove_legacy_versions 2016-03-08 15:19:03.438148 7f82a6b55700 7 mon.a@0(leader).paxosservice(osdmap 1..4) _active creating new pending 2016-03-08 15:19:03.438153 7f82a6b55700 10 mon.a@0(leader).osd e4 create_pending e 5 2016-03-08 15:19:03.438197 7f82a6b55700 0 log_channel(audit) log [INF] : from='client.? 
127.0.0.1:0/3061411355' entity='client.bootstrap-osd' cmd='[{"prefix": "osd create", "uuid": "ad2b7a59-c78c-449e-86c6-e537c3b12550"}]': finished 2016-03-08 15:19:03.438212 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 17 at 2016-03-08 15:19:03.438199) v1 -- ?+0 0x559cd17cac80 con 0x559cd1477080 2016-03-08 15:19:03.438218 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 17 at 2016-03-08 15:19:03.438199) v1 local 2016-03-08 15:19:03.438235 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd1809720 0x559cd1556d00 mon_command_ack([{"prefix": "osd create", "uuid": "ad2b7a59-c78c-449e-86c6-e537c3b12550"}]=0 v4) v1 2016-03-08 15:19:03.438237 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/3061411355 -- mon_command_ack([{"prefix": "osd create", "uuid": "ad2b7a59-c78c-449e-86c6-e537c3b12550"}]=0 v4) v1 -- ?+2 0x559cd1556d00 con 0x559cd1479300 2016-03-08 15:19:03.438241 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message mon_command_ack([{"prefix": "osd create", "uuid": "ad2b7a59-c78c-449e-86c6-e537c3b12550"}]=0 v4) v1 remote, 127.0.0.1:0/3061411355, have pipe. 2016-03-08 15:19:03.438265 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..16) is_readable = 1 - now=2016-03-08 15:19:03.438265 lease_expire=0.000000 has v0 lc 16 2016-03-08 15:19:03.438270 7f82a6b55700 10 mon.a@0(leader).pg v4 check_osd_map applying osdmap e4 to pg_map 2016-03-08 15:19:03.438265 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.438263 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 17 at 2016-03-08 15:19:03.438199) v1 ==== 0+0+0 (0 0 0) 0x559cd17cac80 con 0x559cd1477080 2016-03-08 15:19:03.438278 7f82a6b55700 10 mon.a@0(leader).pg v4 check_osd_map osd.0 created or destroyed 2016-03-08 15:19:03.438275 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer encoding 6 features 576460752303423487 0x559cd1556d00 mon_command_ack([{"prefix": "osd create", "uuid": "ad2b7a59-c78c-449e-86c6-e537c3b12550"}]=0 v4) v1 2016-03-08 15:19:03.438280 7f82a6b55700 10 mon.a@0(leader).pg v4 map_pg_creates to 4 pgs, osdmap epoch 4 2016-03-08 15:19:03.438287 7f82a6b55700 10 mon.a@0(leader).pg v4 register_new_pgs checking pg pools for osdmap epoch 4, last_pg_scan 3 2016-03-08 15:19:03.438288 7f82a6b55700 10 mon.a@0(leader).pg v4 no change in pool 1 replicated size 3 min_size 2 crush_ruleset 0 object_hash rjenkins pg_num 4 pgp_num 4 last_change 3 flags hashpspool stripe_width 0 2016-03-08 15:19:03.438293 7f82a6b55700 10 mon.a@0(leader).pg v4 register_new_pgs registered 0 new pgs, removed 0 uncreated pgs 2016-03-08 15:19:03.438294 7f82a6b55700 10 mon.a@0(leader).pg v4 check_down_pgs 2016-03-08 15:19:03.438292 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer signed seq # 6): sig = 0 2016-03-08 15:19:03.438296 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..4) propose_pending 2016-03-08 15:19:03.438296 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer sending 6 0x559cd1556d00 2016-03-08 15:19:03.438298 7f82a6b55700 10 mon.a@0(leader).pg v4 encode_pending v 5 2016-03-08 15:19:03.438318 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..16) 
queue_pending_finisher 0x559cd1494390 2016-03-08 15:19:03.438321 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..16) trigger_propose active, proposing now 2016-03-08 15:19:03.438329 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..16) propose_pending 17 5118 bytes 2016-03-08 15:19:03.438332 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..16) begin for 17 5118 bytes 2016-03-08 15:19:03.438341 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer: state = open policy.server=1 2016-03-08 15:19:03.438346 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).writer sleeping 2016-03-08 15:19:03.449429 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..16) commit_start 17 2016-03-08 15:19:03.449478 7f82a6b55700 10 mon.a@0(leader).osd e4 update_logger 2016-03-08 15:19:03.449488 7f82a6b55700 0 log_channel(cluster) log [INF] : osdmap e4: 1 osds: 0 up, 0 in 2016-03-08 15:19:03.449506 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 18 at 2016-03-08 15:19:03.449489) v1 -- ?+0 0x559cd17caf00 con 0x559cd1477080 2016-03-08 15:19:03.449520 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 18 at 2016-03-08 15:19:03.449489) v1 local 2016-03-08 15:19:03.449527 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..16) finish_round waiting_for_readable 2016-03-08 15:19:03.449530 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..16) finish_round waiting_for_writeable 2016-03-08 15:19:03.449531 7f82a6b55700 10 mon.a@0(leader).paxos(paxos writing c 1..16) finish_round done w/ waiters, state 4 2016-03-08 15:19:03.449582 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.449589 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:03.449607 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..7) dispatch 0x559cd17cac80 log(1 entries from seq 17 at 2016-03-08 15:19:03.438199) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:03.449617 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 1..16) is_readable = 1 - now=2016-03-08 15:19:03.449617 lease_expire=0.000000 has v0 lc 16 2016-03-08 15:19:03.449634 7f82a5352700 10 mon.a@0(leader).log v7 preprocess_query log(1 entries from seq 17 at 2016-03-08 15:19:03.438199) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.449646 7f82a5352700 10 mon.a@0(leader).log v7 preprocess_log log(1 entries from seq 17 at 2016-03-08 15:19:03.438199) v1 from mon.0 2016-03-08 15:19:03.449653 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:03.449655 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:03.449657 7f82a5352700 20 allow all 2016-03-08 15:19:03.449661 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..7) waiting for paxos -> writeable 2016-03-08 15:19:03.449676 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd17cac80 2016-03-08 15:19:03.449682 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 18 at 2016-03-08 15:19:03.449489) v1 ==== 0+0+0 (0 0 0) 0x559cd17caf00 con 0x559cd1477080 2016-03-08 15:19:03.449729 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.449735 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 
15:19:03.449750 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..7) dispatch 0x559cd17caf00 log(1 entries from seq 18 at 2016-03-08 15:19:03.449489) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:03.449756 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 1..16) is_readable = 1 - now=2016-03-08 15:19:03.449757 lease_expire=0.000000 has v0 lc 16 2016-03-08 15:19:03.449773 7f82a5352700 10 mon.a@0(leader).log v7 preprocess_query log(1 entries from seq 18 at 2016-03-08 15:19:03.449489) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.449783 7f82a5352700 10 mon.a@0(leader).log v7 preprocess_log log(1 entries from seq 18 at 2016-03-08 15:19:03.449489) v1 from mon.0 2016-03-08 15:19:03.449788 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:03.449790 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:03.449791 7f82a5352700 20 allow all 2016-03-08 15:19:03.449802 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..7) waiting for paxos -> writeable 2016-03-08 15:19:03.449817 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd17caf00 2016-03-08 15:19:03.456710 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..16) commit_finish 17 2016-03-08 15:19:03.456737 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:03.456752 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..5) refresh 2016-03-08 15:19:03.456755 7f82a6b55700 10 mon.a@0(leader).pg v4 update_from_paxos read_incremental 2016-03-08 15:19:03.456761 7f82a6b55700 20 mon.a@0(leader).pg v4 refreshing osd.0 2016-03-08 15:19:03.456781 7f82a6b55700 10 mon.a@0(leader).pg v5 read_pgmap_meta 2016-03-08 15:19:03.456796 7f82a6b55700 10 mon.a@0(leader).pg v5 update_logger 2016-03-08 15:19:03.456806 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:03.456816 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..4) refresh 2016-03-08 15:19:03.456824 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..8) refresh 2016-03-08 15:19:03.456827 7f82a6b55700 10 mon.a@0(leader).log v8 update_from_paxos 2016-03-08 15:19:03.456828 7f82a6b55700 10 mon.a@0(leader).log v8 update_from_paxos version 8 summary v 7 2016-03-08 15:19:03.456832 7f82a6b55700 10 mon.a@0(leader).log v8 update_from_paxos latest full 7 2016-03-08 15:19:03.456837 7f82a6b55700 7 mon.a@0(leader).log v8 update_from_paxos applying incremental log 8 2016-03-08 15:19:03.356149 mon.0 127.0.0.1:7104/0 16 : audit [INF] from='client.? 
127.0.0.1:0/3061411355' entity='client.bootstrap-osd' cmd=[{"prefix": "osd create", "uuid": "ad2b7a59-c78c-449e-86c6-e537c3b12550"}]: dispatch 2016-03-08 15:19:03.456849 7f82a6b55700 20 mon.a@0(leader).log v8 update_from_paxos logging for channel 'audit' to file 'testdir/osd-crush/log' 2016-03-08 15:19:03.456858 7f82a6b55700 15 mon.a@0(leader).log v8 update_from_paxos logging for 1 channels 2016-03-08 15:19:03.456860 7f82a6b55700 15 mon.a@0(leader).log v8 update_from_paxos channel 'audit' logging 225 bytes 2016-03-08 15:19:03.456882 7f82a6b55700 10 mon.a@0(leader).log v8 check_subs 2016-03-08 15:19:03.456899 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:03.456908 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) refresh 2016-03-08 15:19:03.456910 7f82a6b55700 10 mon.a@0(leader).auth v2 update_from_paxos 2016-03-08 15:19:03.456912 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..5) post_refresh 2016-03-08 15:19:03.456913 7f82a6b55700 10 mon.a@0(leader).pg v5 post_paxos_update 2016-03-08 15:19:03.456914 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:03.456915 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..4) post_refresh 2016-03-08 15:19:03.456915 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..8) post_refresh 2016-03-08 15:19:03.456923 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:03.456924 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) post_refresh 2016-03-08 15:19:03.456925 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..17) commit_proposal 2016-03-08 15:19:03.456927 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..8) _active - not active 2016-03-08 15:19:03.456928 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..5) _active - not active 2016-03-08 15:19:03.456929 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..17) finish_round 2016-03-08 15:19:03.456930 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..17) finish_round waiting_for_acting 2016-03-08 15:19:03.456931 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..8) _active 2016-03-08 15:19:03.456932 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..8) remove_legacy_versions 2016-03-08 15:19:03.456936 7f82a6b55700 7 mon.a@0(leader).paxosservice(logm 1..8) _active creating new pending 2016-03-08 15:19:03.456941 7f82a6b55700 10 mon.a@0(leader).log v8 create_pending v 9 2016-03-08 15:19:03.456948 7f82a6b55700 7 mon.a@0(leader).log v8 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.456959 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd1809de0 0x559cd15558c0 log(last 16) v1 2016-03-08 15:19:03.456961 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 16) v1 -- ?+0 0x559cd15558c0 con 0x559cd1477080 2016-03-08 15:19:03.456964 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 16) v1 local 2016-03-08 15:19:03.456988 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..8) dispatch 0x559cd17cac80 log(1 entries from seq 17 at 2016-03-08 15:19:03.438199) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:03.456993 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..17) is_readable = 1 - now=2016-03-08 15:19:03.456993 lease_expire=0.000000 has v0 lc 17 2016-03-08 15:19:03.457000 7f82a6b55700 10 mon.a@0(leader).log v8 preprocess_query log(1 entries from seq 17 at 2016-03-08 15:19:03.438199) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.456997 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 
127.0.0.1:7104/0 0 ==== log(last 16) v1 ==== 0+0+0 (0 0 0) 0x559cd15558c0 con 0x559cd1477080 2016-03-08 15:19:03.457005 7f82a6b55700 10 mon.a@0(leader).log v8 preprocess_log log(1 entries from seq 17 at 2016-03-08 15:19:03.438199) v1 from mon.0 2016-03-08 15:19:03.457008 7f82a6b55700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:03.457009 7f82a6b55700 20 allow so far , doing grant allow * 2016-03-08 15:19:03.457010 7f82a6b55700 20 allow all 2016-03-08 15:19:03.457014 7f82a6b55700 10 mon.a@0(leader).log v8 prepare_update log(1 entries from seq 17 at 2016-03-08 15:19:03.438199) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.457019 7f82a6b55700 10 mon.a@0(leader).log v8 prepare_log log(1 entries from seq 17 at 2016-03-08 15:19:03.438199) v1 from mon.0 2016-03-08 15:19:03.457021 7f82a6b55700 10 mon.a@0(leader).log v8 logging 2016-03-08 15:19:03.438199 mon.0 127.0.0.1:7104/0 17 : audit [INF] from='client.? 127.0.0.1:0/3061411355' entity='client.bootstrap-osd' cmd='[{"prefix": "osd create", "uuid": "ad2b7a59-c78c-449e-86c6-e537c3b12550"}]': finished 2016-03-08 15:19:03.457031 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..8) setting proposal_timer 0x559cd1494390 with delay of 0.0996866 2016-03-08 15:19:03.457049 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..8) dispatch 0x559cd17caf00 log(1 entries from seq 18 at 2016-03-08 15:19:03.449489) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:03.457053 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..17) is_readable = 1 - now=2016-03-08 15:19:03.457053 lease_expire=0.000000 has v0 lc 17 2016-03-08 15:19:03.457058 7f82a6b55700 10 mon.a@0(leader).log v8 preprocess_query log(1 entries from seq 18 at 2016-03-08 15:19:03.449489) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.457065 7f82a6b55700 10 mon.a@0(leader).log v8 preprocess_log log(1 entries from seq 18 at 2016-03-08 15:19:03.449489) v1 from mon.0 2016-03-08 15:19:03.457068 7f82a6b55700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:03.457072 7f82a6b55700 20 allow so far , doing grant allow * 2016-03-08 15:19:03.457073 7f82a6b55700 20 allow all 2016-03-08 15:19:03.457077 7f82a6b55700 10 mon.a@0(leader).log v8 prepare_update log(1 entries from seq 18 at 2016-03-08 15:19:03.449489) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.457082 7f82a6b55700 10 mon.a@0(leader).log v8 prepare_log log(1 entries from seq 18 at 2016-03-08 15:19:03.449489) v1 from mon.0 2016-03-08 15:19:03.457085 7f82a6b55700 10 mon.a@0(leader).log v8 logging 2016-03-08 15:19:03.449489 mon.0 127.0.0.1:7104/0 18 : cluster [INF] osdmap e4: 1 osds: 0 up, 0 in 2016-03-08 15:19:03.457092 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..8) proposal_timer already set 2016-03-08 15:19:03.457095 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..5) _active 2016-03-08 15:19:03.457097 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..5) remove_legacy_versions 2016-03-08 15:19:03.457103 7f82a6b55700 7 mon.a@0(leader).paxosservice(pgmap 1..5) _active creating new pending 2016-03-08 15:19:03.457105 7f82a6b55700 10 mon.a@0(leader).pg v5 create_pending v 6 2016-03-08 15:19:03.457107 7f82a6b55700 10 mon.a@0(leader).pg v5 check_osd_map already seen 4 >= 4 2016-03-08 15:19:03.457108 7f82a6b55700 10 mon.a@0(leader).pg v5 update_logger 2016-03-08 15:19:03.457118 7f82a6b55700 0 log_channel(cluster) log [INF] : pgmap v5: 4 pgs: 4 creating; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:03.457134 7f82a6b55700 1 -- 127.0.0.1:7104/0 
--> 127.0.0.1:7104/0 -- log(1 entries from seq 19 at 2016-03-08 15:19:03.457120) v1 -- ?+0 0x559cd1563480 con 0x559cd1477080 2016-03-08 15:19:03.457138 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 19 at 2016-03-08 15:19:03.457120) v1 local 2016-03-08 15:19:03.457143 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..17) finish_round waiting_for_readable 2016-03-08 15:19:03.457144 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..17) finish_round waiting_for_writeable 2016-03-08 15:19:03.457145 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..17) finish_round done w/ waiters, state 1 2016-03-08 15:19:03.457180 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.457185 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:03.457191 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:03.457192 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:03.457193 7f82a5352700 20 allow all 2016-03-08 15:19:03.457214 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd15558c0 2016-03-08 15:19:03.457221 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 19 at 2016-03-08 15:19:03.457120) v1 ==== 0+0+0 (0 0 0) 0x559cd1563480 con 0x559cd1477080 2016-03-08 15:19:03.457264 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.457269 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:03.457287 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..8) dispatch 0x559cd1563480 log(1 entries from seq 19 at 2016-03-08 15:19:03.457120) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:03.457294 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..17) is_readable = 1 - now=2016-03-08 15:19:03.457294 lease_expire=0.000000 has v0 lc 17 2016-03-08 15:19:03.457308 7f82a5352700 10 mon.a@0(leader).log v8 preprocess_query log(1 entries from seq 19 at 2016-03-08 15:19:03.457120) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.457320 7f82a5352700 10 mon.a@0(leader).log v8 preprocess_log log(1 entries from seq 19 at 2016-03-08 15:19:03.457120) v1 from mon.0 2016-03-08 15:19:03.457325 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:03.457327 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:03.457328 7f82a5352700 20 allow all 2016-03-08 15:19:03.457341 7f82a5352700 10 mon.a@0(leader).log v8 prepare_update log(1 entries from seq 19 at 2016-03-08 15:19:03.457120) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.457352 7f82a5352700 10 mon.a@0(leader).log v8 prepare_log log(1 entries from seq 19 at 2016-03-08 15:19:03.457120) v1 from mon.0 2016-03-08 15:19:03.457358 7f82a5352700 10 mon.a@0(leader).log v8 logging 2016-03-08 15:19:03.457120 mon.0 127.0.0.1:7104/0 19 : cluster [INF] pgmap v5: 4 pgs: 4 creating; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:03.457374 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..8) proposal_timer already set 2016-03-08 15:19:03.457378 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1563480 2016-03-08 15:19:03.470710 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).reader couldn't read tag, (0) Success 2016-03-08 15:19:03.470733 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 
127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).fault (0) Success 2016-03-08 15:19:03.470775 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).fault on lossy channel, failing 2016-03-08 15:19:03.470781 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1479300).stop 2016-03-08 15:19:03.470789 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1479300).unregister_pipe 2016-03-08 15:19:03.470794 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1479300).discard_queue 2016-03-08 15:19:03.470804 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1479300).reader done 2016-03-08 15:19:03.470807 7f82a5352700 10 mon.a@0(leader) e1 ms_handle_reset 0x559cd1479300 127.0.0.1:0/3061411355 2016-03-08 15:19:03.470815 7f82a5352700 10 mon.a@0(leader) e1 reset/close on session client.? 127.0.0.1:0/3061411355 2016-03-08 15:19:03.470819 7f82a5352700 10 mon.a@0(leader) e1 remove_session 0x559cd14d7200 client.? 127.0.0.1:0/3061411355 2016-03-08 15:19:03.470822 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1479300).writer finishing 2016-03-08 15:19:03.470833 7f82a334e700 10 -- 127.0.0.1:7104/0 queue_reap 0x559cd17ac800 2016-03-08 15:19:03.470841 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1479300).writer done 2016-03-08 15:19:03.470845 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper 2016-03-08 15:19:03.470849 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper reaping pipe 0x559cd17ac800 127.0.0.1:0/3061411355 2016-03-08 15:19:03.470852 7f82a6354700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1479300).discard_queue 2016-03-08 15:19:03.470855 7f82a6354700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1479300).unregister_pipe - not registered 2016-03-08 15:19:03.470861 7f82a6354700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/3061411355 pipe(0x559cd17ac800 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1479300).join 2016-03-08 15:19:03.470884 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper reaped pipe 0x559cd17ac800 127.0.0.1:0/3061411355 2016-03-08 15:19:03.470892 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper deleted pipe 0x559cd17ac800 2016-03-08 15:19:03.470893 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper done 2016-03-08 15:19:03.556790 7f82a5b53700 10 mon.a@0(leader).paxosservice(logm 1..8) propose_pending 2016-03-08 15:19:03.556817 7f82a5b53700 10 mon.a@0(leader).log v8 encode_full log v 8 2016-03-08 15:19:03.556840 7f82a5b53700 10 mon.a@0(leader).log v8 encode_pending v9 2016-03-08 15:19:03.556846 7f82a5b53700 5 mon.a@0(leader).paxos(paxos active c 1..17) queue_pending_finisher 0x559cd1494520 2016-03-08 15:19:03.556849 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..17) trigger_propose active, proposing now 2016-03-08 15:19:03.556854 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..17) propose_pending 18 5442 bytes 2016-03-08 15:19:03.556856 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..17) begin for 18 5442 bytes 2016-03-08 15:19:03.566425 
7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..17) commit_start 18 2016-03-08 15:19:03.573403 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..17) commit_finish 18 2016-03-08 15:19:03.573445 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:03.573466 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..5) refresh 2016-03-08 15:19:03.573476 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:03.573483 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..4) refresh 2016-03-08 15:19:03.573491 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..9) refresh 2016-03-08 15:19:03.573493 7f82a6b55700 10 mon.a@0(leader).log v9 update_from_paxos 2016-03-08 15:19:03.573495 7f82a6b55700 10 mon.a@0(leader).log v9 update_from_paxos version 9 summary v 8 2016-03-08 15:19:03.573500 7f82a6b55700 10 mon.a@0(leader).log v9 update_from_paxos latest full 8 2016-03-08 15:19:03.573508 7f82a6b55700 7 mon.a@0(leader).log v9 update_from_paxos applying incremental log 9 2016-03-08 15:19:03.438199 mon.0 127.0.0.1:7104/0 17 : audit [INF] from='client.? 127.0.0.1:0/3061411355' entity='client.bootstrap-osd' cmd='[{"prefix": "osd create", "uuid": "ad2b7a59-c78c-449e-86c6-e537c3b12550"}]': finished 2016-03-08 15:19:03.573522 7f82a6b55700 20 mon.a@0(leader).log v9 update_from_paxos logging for channel 'audit' to file 'testdir/osd-crush/log' 2016-03-08 15:19:03.573532 7f82a6b55700 7 mon.a@0(leader).log v9 update_from_paxos applying incremental log 9 2016-03-08 15:19:03.449489 mon.0 127.0.0.1:7104/0 18 : cluster [INF] osdmap e4: 1 osds: 0 up, 0 in 2016-03-08 15:19:03.573537 7f82a6b55700 20 mon.a@0(leader).log v9 update_from_paxos logging for channel 'cluster' to file 'testdir/osd-crush/log' 2016-03-08 15:19:03.573543 7f82a6b55700 7 mon.a@0(leader).log v9 update_from_paxos applying incremental log 9 2016-03-08 15:19:03.457120 mon.0 127.0.0.1:7104/0 19 : cluster [INF] pgmap v5: 4 pgs: 4 creating; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:03.573547 7f82a6b55700 20 mon.a@0(leader).log v9 update_from_paxos logging for channel 'cluster' to file 'testdir/osd-crush/log' 2016-03-08 15:19:03.573552 7f82a6b55700 15 mon.a@0(leader).log v9 update_from_paxos logging for 2 channels 2016-03-08 15:19:03.573554 7f82a6b55700 15 mon.a@0(leader).log v9 update_from_paxos channel 'audit' logging 227 bytes 2016-03-08 15:19:03.573570 7f82a6b55700 15 mon.a@0(leader).log v9 update_from_paxos channel 'cluster' logging 240 bytes 2016-03-08 15:19:03.573576 7f82a6b55700 10 mon.a@0(leader).log v9 check_subs 2016-03-08 15:19:03.573592 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:03.573600 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) refresh 2016-03-08 15:19:03.573603 7f82a6b55700 10 mon.a@0(leader).auth v2 update_from_paxos 2016-03-08 15:19:03.573605 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..5) post_refresh 2016-03-08 15:19:03.573606 7f82a6b55700 10 mon.a@0(leader).pg v5 post_paxos_update 2016-03-08 15:19:03.573607 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:03.573609 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..4) post_refresh 2016-03-08 15:19:03.573610 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..9) post_refresh 2016-03-08 15:19:03.573611 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:03.573611 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) post_refresh 2016-03-08 15:19:03.573613 7f82a6b55700 10 
mon.a@0(leader).paxos(paxos refresh c 1..18) commit_proposal 2016-03-08 15:19:03.573615 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..9) _active - not active 2016-03-08 15:19:03.573617 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..18) finish_round 2016-03-08 15:19:03.573618 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..18) finish_round waiting_for_acting 2016-03-08 15:19:03.573619 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..9) _active 2016-03-08 15:19:03.573620 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..9) remove_legacy_versions 2016-03-08 15:19:03.573624 7f82a6b55700 7 mon.a@0(leader).paxosservice(logm 1..9) _active creating new pending 2016-03-08 15:19:03.573630 7f82a6b55700 10 mon.a@0(leader).log v9 create_pending v 10 2016-03-08 15:19:03.573639 7f82a6b55700 7 mon.a@0(leader).log v9 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.573646 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd18ba5a0 0x559cd1556ac0 log(last 17) v1 2016-03-08 15:19:03.573649 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 17) v1 -- ?+0 0x559cd1556ac0 con 0x559cd1477080 2016-03-08 15:19:03.573653 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 17) v1 local 2016-03-08 15:19:03.573681 7f82a6b55700 7 mon.a@0(leader).log v9 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.573688 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd18ba900 0x559cd1555f80 log(last 18) v1 2016-03-08 15:19:03.573679 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 17) v1 ==== 0+0+0 (0 0 0) 0x559cd1556ac0 con 0x559cd1477080 2016-03-08 15:19:03.573689 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 18) v1 -- ?+0 0x559cd1555f80 con 0x559cd1477080 2016-03-08 15:19:03.573691 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 18) v1 local 2016-03-08 15:19:03.573715 7f82a6b55700 7 mon.a@0(leader).log v9 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.573721 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd18bac60 0x559cd1555b00 log(last 19) v1 2016-03-08 15:19:03.573722 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 19) v1 -- ?+0 0x559cd1555b00 con 0x559cd1477080 2016-03-08 15:19:03.573724 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 19) v1 local 2016-03-08 15:19:03.573736 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..18) finish_round waiting_for_readable 2016-03-08 15:19:03.573738 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..18) finish_round waiting_for_writeable 2016-03-08 15:19:03.573740 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..18) finish_round done w/ waiters, state 1 2016-03-08 15:19:03.573775 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.573782 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:03.573788 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:03.573792 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:03.573795 7f82a5352700 20 allow all 2016-03-08 15:19:03.573816 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1556ac0 2016-03-08 15:19:03.573822 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 18) v1 ==== 0+0+0 (0 0 0) 0x559cd1555f80 con 0x559cd1477080 2016-03-08 15:19:03.573851 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 
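The log(last N) acks above confirm that the audit entries for the "osd create" command (dispatch, then finished) and the cluster entries (osdmap e4, pgmap v5) have been committed through paxos and written to the cluster log file named in the update_from_paxos lines, testdir/osd-crush/log. When reading a run like this, the quickest check on that file is a plain grep:

    # The committed audit/cluster entries end up in the mon cluster log file.
    grep 'osd create' testdir/osd-crush/log              # audit: dispatch + finished
    grep -E 'osdmap e4|pgmap v5' testdir/osd-crush/log   # cluster status lines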
2016-03-08 15:19:03.573855 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:03.573860 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:03.573861 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:03.573863 7f82a5352700 20 allow all 2016-03-08 15:19:03.573877 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1555f80 2016-03-08 15:19:03.573881 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 19) v1 ==== 0+0+0 (0 0 0) 0x559cd1555b00 con 0x559cd1477080 2016-03-08 15:19:03.573905 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.573909 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:03.573914 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:03.573915 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:03.573916 7f82a5352700 20 allow all 2016-03-08 15:19:03.573931 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1555b00 2016-03-08 15:19:03.577247 7f82a4350700 20 accepter.accepter poll got 1 2016-03-08 15:19:03.577252 7f82a4350700 10 accepter.pfd.revents=1 2016-03-08 15:19:03.577256 7f82a4350700 10 accepter.accepted incoming on sd 21 2016-03-08 15:19:03.577282 7f82a4350700 20 accepter.accepter calling poll 2016-03-08 15:19:03.577306 7f82aea26700 10 -- 127.0.0.1:7104/0 >> :/0 pipe(0x559cd17aa000 sd=21 :0 s=0 pgs=0 cs=0 l=0 c=0x559cd18dfc00).accept 2016-03-08 15:19:03.577351 7f82aea26700 1 -- 127.0.0.1:7104/0 >> :/0 pipe(0x559cd17aa000 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd18dfc00).accept sd=21 127.0.0.1:52224/0 2016-03-08 15:19:03.577400 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd18dfc00).accept peer addr is 127.0.0.1:0/1824291270 2016-03-08 15:19:03.577409 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd18dfc00).accept got peer connect_seq 0 global_seq 1 2016-03-08 15:19:03.577417 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd18dfc00).accept of host_type 8, policy.lossy=1 policy.server=1 policy.standby=0 policy.resetcheck=0 2016-03-08 15:19:03.577420 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd18dfc00).accept my proto 15, their proto 15 2016-03-08 15:19:03.577422 7f82aea26700 10 mon.a@0(leader) e1 ms_verify_authorizer 127.0.0.1:0/1824291270 client protocol 0 2016-03-08 15:19:03.577425 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd18dfc00).accept: setting up session_security. 
2016-03-08 15:19:03.577430 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd18dfc00).accept new session 2016-03-08 15:19:03.577433 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).accept success, connect_seq = 1, sending READY 2016-03-08 15:19:03.577434 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).accept features 576460752303423487 2016-03-08 15:19:03.577441 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).register_pipe 2016-03-08 15:19:03.577460 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).discard_requeued_up_to 0 2016-03-08 15:19:03.577464 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).accept starting writer, state open 2016-03-08 15:19:03.577475 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).accept done 2016-03-08 15:19:03.577481 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader reading tag... 2016-03-08 15:19:03.577489 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.577496 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer sleeping 2016-03-08 15:19:03.577501 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got KEEPALIVE2 2016-03-08 15:19:03.577484 2016-03-08 15:19:03.577513 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader reading tag... 2016-03-08 15:19:03.577514 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.577516 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).write_keepalive2 15 2016-03-08 15:19:03.577484 2016-03-08 15:19:03.577529 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got MSG 2016-03-08 15:19:03.577533 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got envelope type=17 src client.? 
front=68 data=0 off 0 2016-03-08 15:19:03.577535 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader wants 68 bytes from policy throttler 0/104857600 2016-03-08 15:19:03.577539 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader wants 68 from dispatch throttler 0/104857600 2016-03-08 15:19:03.577542 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got front 68 2016-03-08 15:19:03.577543 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.577544 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).aborted = 0 2016-03-08 15:19:03.577546 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer sleeping 2016-03-08 15:19:03.577546 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got 68 + 0 + 0 byte message 2016-03-08 15:19:03.577554 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got message 1 0x559cd17ccd00 auth(proto 0 38 bytes epoch 0) v1 2016-03-08 15:19:03.577557 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd17ccd00 prio 127 2016-03-08 15:19:03.577562 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader reading tag... 2016-03-08 15:19:03.577564 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.? 127.0.0.1:0/1824291270 1 ==== auth(proto 0 38 bytes epoch 0) v1 ==== 68+0+0 (424667196 0 0) 0x559cd17ccd00 con 0x559cd18dfc00 2016-03-08 15:19:03.577565 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.577572 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).write_ack 1 2016-03-08 15:19:03.577576 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.577578 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer sleeping 2016-03-08 15:19:03.577584 7f82a5352700 10 mon.a@0(leader) e1 _ms_dispatch new session 0x559cd1857000 MonSession(client.? 127.0.0.1:0/1824291270 is open) 2016-03-08 15:19:03.577587 7f82a5352700 20 mon.a@0(leader) e1 caps 2016-03-08 15:19:03.577595 7f82a5352700 10 mon.a@0(leader).paxosservice(auth 1..2) dispatch 0x559cd17ccd00 auth(proto 0 38 bytes epoch 0) v1 from client.? 127.0.0.1:0/1824291270 con 0x559cd18dfc00 2016-03-08 15:19:03.577598 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..18) is_readable = 1 - now=2016-03-08 15:19:03.577598 lease_expire=0.000000 has v0 lc 18 2016-03-08 15:19:03.577601 7f82a5352700 10 mon.a@0(leader).auth v2 preprocess_query auth(proto 0 38 bytes epoch 0) v1 from client.? 
127.0.0.1:0/1824291270 2016-03-08 15:19:03.577603 7f82a5352700 10 mon.a@0(leader).auth v2 prep_auth() blob_size=38 2016-03-08 15:19:03.577610 7f82a5352700 10 mon.a@0(leader).auth v2 AuthMonitor::assign_global_id m=auth(proto 0 38 bytes epoch 0) v1 mon=0/1 last_allocated=4099 max_global_id=14096 2016-03-08 15:19:03.577611 7f82a5352700 10 mon.a@0(leader).auth v2 next_global_id should be 4100 2016-03-08 15:19:03.577615 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/1824291270 -- mon_map magic: 0 v1 -- ?+0 0x559cd1556640 con 0x559cd18dfc00 2016-03-08 15:19:03.577618 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_map magic: 0 v1 remote, 127.0.0.1:0/1824291270, have pipe. 2016-03-08 15:19:03.577623 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.577626 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer encoding 1 features 576460752303423487 0x559cd1556640 mon_map magic: 0 v1 2016-03-08 15:19:03.577627 7f82a5352700 2 mon.a@0(leader) e1 send_reply 0x559cd18bb320 0x559cd1563980 auth_reply(proto 1 0 (0) Success) v1 2016-03-08 15:19:03.577630 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/1824291270 -- auth_reply(proto 1 0 (0) Success) v1 -- ?+0 0x559cd1563980 con 0x559cd18dfc00 2016-03-08 15:19:03.577636 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer signed seq # 1): sig = 0 2016-03-08 15:19:03.577640 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer sending 1 0x559cd1556640 2016-03-08 15:19:03.577641 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message auth_reply(proto 1 0 (0) Success) v1 remote, 127.0.0.1:0/1824291270, have pipe. 
2016-03-08 15:19:03.577651 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 68 to dispatch throttler 68/104857600 2016-03-08 15:19:03.577653 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd17ccd00 2016-03-08 15:19:03.577652 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.577655 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer encoding 2 features 576460752303423487 0x559cd1563980 auth_reply(proto 1 0 (0) Success) v1 2016-03-08 15:19:03.577661 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer signed seq # 2): sig = 0 2016-03-08 15:19:03.577663 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer sending 2 0x559cd1563980 2016-03-08 15:19:03.577672 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.577674 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer sleeping 2016-03-08 15:19:03.577790 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got ACK 2016-03-08 15:19:03.577799 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got ack seq 1 2016-03-08 15:19:03.577802 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader reading tag... 2016-03-08 15:19:03.577803 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got ACK 2016-03-08 15:19:03.577805 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got ack seq 2 2016-03-08 15:19:03.577807 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader reading tag... 2016-03-08 15:19:03.577809 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got MSG 2016-03-08 15:19:03.577810 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got envelope type=15 src client.? 
front=23 data=0 off 0 2016-03-08 15:19:03.577813 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader wants 23 bytes from policy throttler 0/104857600 2016-03-08 15:19:03.577815 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader wants 23 from dispatch throttler 0/104857600 2016-03-08 15:19:03.577819 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got front 23 2016-03-08 15:19:03.577821 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).aborted = 0 2016-03-08 15:19:03.577822 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got 23 + 0 + 0 byte message 2016-03-08 15:19:03.577829 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got message 2 0x559cd1857400 mon_subscribe({monmap=0+}) v2 2016-03-08 15:19:03.577833 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1857400 prio 127 2016-03-08 15:19:03.577837 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader reading tag... 2016-03-08 15:19:03.577839 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.577840 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.? 127.0.0.1:0/1824291270 2 ==== mon_subscribe({monmap=0+}) v2 ==== 23+0+0 (1620593354 0 0) 0x559cd1857400 con 0x559cd18dfc00 2016-03-08 15:19:03.577846 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).write_ack 2 2016-03-08 15:19:03.577857 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.577860 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got MSG 2016-03-08 15:19:03.577863 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got envelope type=15 src client.4100 front=23 data=0 off 0 2016-03-08 15:19:03.577865 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader wants 23 bytes from policy throttler 23/104857600 2016-03-08 15:19:03.577863 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer sleeping 2016-03-08 15:19:03.577867 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader wants 23 from dispatch throttler 23/104857600 2016-03-08 15:19:03.577870 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got front 23 2016-03-08 15:19:03.577872 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 
l=1 c=0x559cd18dfc00).aborted = 0 2016-03-08 15:19:03.577872 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1857000 for client.? 127.0.0.1:0/1824291270 2016-03-08 15:19:03.577874 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:03.577874 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got 23 + 0 + 0 byte message 2016-03-08 15:19:03.577877 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:03.577878 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got message 3 0x559cd1857600 mon_subscribe({osdmap=0}) v2 2016-03-08 15:19:03.577880 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:03.577881 7f82a5352700 20 allow all 2016-03-08 15:19:03.577880 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1857600 prio 127 2016-03-08 15:19:03.577882 7f82a5352700 10 mon.a@0(leader) e1 handle_subscribe mon_subscribe({monmap=0+}) v2 2016-03-08 15:19:03.577884 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader reading tag... 2016-03-08 15:19:03.577887 7f82a5352700 10 mon.a@0(leader) e1 check_sub monmap next 0 have 1 2016-03-08 15:19:03.577891 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/1824291270 -- mon_map magic: 0 v1 -- ?+0 0x559cd15546c0 con 0x559cd18dfc00 2016-03-08 15:19:03.577891 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.577897 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).write_ack 3 2016-03-08 15:19:03.577898 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_map magic: 0 v1 remote, 127.0.0.1:0/1824291270, have pipe. 2016-03-08 15:19:03.577902 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer encoding 3 features 576460752303423487 0x559cd15546c0 mon_map magic: 0 v1 2016-03-08 15:19:03.577907 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer signed seq # 3): sig = 0 2016-03-08 15:19:03.577909 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 23 to dispatch throttler 46/104857600 2016-03-08 15:19:03.577912 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1857400 2016-03-08 15:19:03.577911 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer sending 3 0x559cd15546c0 2016-03-08 15:19:03.577917 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4100 127.0.0.1:0/1824291270 3 ==== mon_subscribe({osdmap=0}) v2 ==== 23+0+0 (319581110 0 0) 0x559cd1857600 con 0x559cd18dfc00 2016-03-08 15:19:03.577938 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.577941 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1857000 for client.? 
127.0.0.1:0/1824291270 2016-03-08 15:19:03.577945 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer sleeping 2016-03-08 15:19:03.577947 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:03.577960 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:03.577961 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:03.577961 7f82a5352700 20 allow all 2016-03-08 15:19:03.577962 7f82a5352700 10 mon.a@0(leader) e1 handle_subscribe mon_subscribe({osdmap=0}) v2 2016-03-08 15:19:03.577964 7f82a5352700 20 is_capable service=osd command= read on cap allow * 2016-03-08 15:19:03.577964 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:03.577965 7f82a5352700 20 allow all 2016-03-08 15:19:03.577966 7f82a5352700 10 mon.a@0(leader).osd e4 check_sub 0x559cd188ac00 next 0 (onetime) 2016-03-08 15:19:03.577969 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/1824291270 -- osd_map(4..4 src has 1..4) v3 -- ?+0 0x559cd17ccd00 con 0x559cd18dfc00 2016-03-08 15:19:03.577972 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message osd_map(4..4 src has 1..4) v3 remote, 127.0.0.1:0/1824291270, have pipe. 2016-03-08 15:19:03.577977 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.577981 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 23 to dispatch throttler 23/104857600 2016-03-08 15:19:03.577983 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1857600 2016-03-08 15:19:03.577988 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer encoding 4 features 576460752303423487 0x559cd17ccd00 osd_map(4..4 src has 1..4) v3 2016-03-08 15:19:03.577996 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer signed seq # 4): sig = 0 2016-03-08 15:19:03.577999 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer sending 4 0x559cd17ccd00 2016-03-08 15:19:03.578021 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.578025 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer sleeping 2016-03-08 15:19:03.579936 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got ACK 2016-03-08 15:19:03.579946 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got ack seq 3 2016-03-08 15:19:03.579959 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader reading tag... 
2016-03-08 15:19:03.579962 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got ACK 2016-03-08 15:19:03.579964 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got ack seq 4 2016-03-08 15:19:03.579966 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader reading tag... 2016-03-08 15:19:03.579968 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got MSG 2016-03-08 15:19:03.579970 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got envelope type=50 src client.4100 front=80 data=0 off 0 2016-03-08 15:19:03.579973 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader wants 80 bytes from policy throttler 0/104857600 2016-03-08 15:19:03.579977 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader wants 80 from dispatch throttler 0/104857600 2016-03-08 15:19:03.579980 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got front 80 2016-03-08 15:19:03.579983 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).aborted = 0 2016-03-08 15:19:03.579985 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got 80 + 0 + 0 byte message 2016-03-08 15:19:03.579992 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got message 4 0x559cd1556640 mon_command({"prefix": "get_command_descriptions"} v 0) v1 2016-03-08 15:19:03.579996 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1556640 prio 127 2016-03-08 15:19:03.580001 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader reading tag... 2016-03-08 15:19:03.580003 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.580011 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).write_ack 4 2016-03-08 15:19:03.580014 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4100 127.0.0.1:0/1824291270 4 ==== mon_command({"prefix": "get_command_descriptions"} v 0) v1 ==== 80+0+0 (3134233619 0 0) 0x559cd1556640 con 0x559cd18dfc00 2016-03-08 15:19:03.580019 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.580024 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer sleeping 2016-03-08 15:19:03.580042 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1857000 for client.? 
127.0.0.1:0/1824291270 2016-03-08 15:19:03.580044 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:03.581113 7f82a5352700 2 mon.a@0(leader) e1 send_reply 0x559cd18bda80 0x559cd1555680 mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 2016-03-08 15:19:03.581116 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/1824291270 -- mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 -- ?+37345 0x559cd1555680 con 0x559cd18dfc00 2016-03-08 15:19:03.581120 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 remote, 127.0.0.1:0/1824291270, have pipe. 2016-03-08 15:19:03.581127 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.581133 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 80 to dispatch throttler 80/104857600 2016-03-08 15:19:03.581136 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1556640 2016-03-08 15:19:03.581133 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer encoding 5 features 576460752303423487 0x559cd1555680 mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 2016-03-08 15:19:03.581147 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer signed seq # 5): sig = 0 2016-03-08 15:19:03.581153 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer sending 5 0x559cd1555680 2016-03-08 15:19:03.581184 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.581188 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer sleeping 2016-03-08 15:19:03.627699 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got ACK 2016-03-08 15:19:03.627727 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got ack seq 5 2016-03-08 15:19:03.627739 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader reading tag... 
2016-03-08 15:19:03.627741 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got MSG 2016-03-08 15:19:03.627743 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got envelope type=50 src client.4100 front=66 data=0 off 0 2016-03-08 15:19:03.627747 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader wants 66 bytes from policy throttler 0/104857600 2016-03-08 15:19:03.627750 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader wants 66 from dispatch throttler 0/104857600 2016-03-08 15:19:03.627755 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got front 66 2016-03-08 15:19:03.627757 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).aborted = 0 2016-03-08 15:19:03.627758 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got 66 + 0 + 0 byte message 2016-03-08 15:19:03.627768 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader got message 5 0x559cd15546c0 mon_command({"prefix": "mon getmap"} v 0) v1 2016-03-08 15:19:03.627772 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd15546c0 prio 127 2016-03-08 15:19:03.627778 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader reading tag... 2016-03-08 15:19:03.627781 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.627801 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).write_ack 5 2016-03-08 15:19:03.627805 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4100 127.0.0.1:0/1824291270 5 ==== mon_command({"prefix": "mon getmap"} v 0) v1 ==== 66+0+0 (1248023798 0 0) 0x559cd15546c0 con 0x559cd18dfc00 2016-03-08 15:19:03.627809 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.627813 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer sleeping 2016-03-08 15:19:03.627833 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1857000 for client.? 
127.0.0.1:0/1824291270 2016-03-08 15:19:03.627836 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:03.627870 7f82a5352700 0 mon.a@0(leader) e1 handle_command mon_command({"prefix": "mon getmap"} v 0) v1 2016-03-08 15:19:03.627891 7f82a5352700 20 is_capable service=mon command=mon getmap read on cap allow * 2016-03-08 15:19:03.627893 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:03.627894 7f82a5352700 20 allow all 2016-03-08 15:19:03.627895 7f82a5352700 10 mon.a@0(leader) e1 _allowed_command capable 2016-03-08 15:19:03.627901 7f82a5352700 0 log_channel(audit) log [DBG] : from='client.? 127.0.0.1:0/1824291270' entity='client.bootstrap-osd' cmd=[{"prefix": "mon getmap"}]: dispatch 2016-03-08 15:19:03.627912 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 20 at 2016-03-08 15:19:03.627903) v1 -- ?+0 0x559cd1563700 con 0x559cd1477080 2016-03-08 15:19:03.627918 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 20 at 2016-03-08 15:19:03.627903) v1 local 2016-03-08 15:19:03.627928 7f82a5352700 10 mon.a@0(leader).paxosservice(monmap 1..1) dispatch 0x559cd15546c0 mon_command({"prefix": "mon getmap"} v 0) v1 from client.4100 127.0.0.1:0/1824291270 con 0x559cd18dfc00 2016-03-08 15:19:03.627933 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..18) is_readable = 1 - now=2016-03-08 15:19:03.627933 lease_expire=0.000000 has v0 lc 18 2016-03-08 15:19:03.627963 7f82a5352700 2 mon.a@0(leader) e1 send_reply 0x559cd18bdcc0 0x559cd15558c0 mon_command_ack([{"prefix": "mon getmap"}]=0 got monmap epoch 1 v1) v1 2016-03-08 15:19:03.627966 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/1824291270 -- mon_command_ack([{"prefix": "mon getmap"}]=0 got monmap epoch 1 v1) v1 -- ?+187 0x559cd15558c0 con 0x559cd18dfc00 2016-03-08 15:19:03.627970 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_command_ack([{"prefix": "mon getmap"}]=0 got monmap epoch 1 v1) v1 remote, 127.0.0.1:0/1824291270, have pipe. 
2016-03-08 15:19:03.627976 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.627983 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 66 to dispatch throttler 66/104857600 2016-03-08 15:19:03.627985 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd15546c0 2016-03-08 15:19:03.627982 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer encoding 6 features 576460752303423487 0x559cd15558c0 mon_command_ack([{"prefix": "mon getmap"}]=0 got monmap epoch 1 v1) v1 2016-03-08 15:19:03.627989 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 20 at 2016-03-08 15:19:03.627903) v1 ==== 0+0+0 (0 0 0) 0x559cd1563700 con 0x559cd1477080 2016-03-08 15:19:03.627990 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer signed seq # 6): sig = 0 2016-03-08 15:19:03.627996 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer sending 6 0x559cd15558c0 2016-03-08 15:19:03.628007 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.628009 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:03.628014 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..9) dispatch 0x559cd1563700 log(1 entries from seq 20 at 2016-03-08 15:19:03.627903) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:03.628017 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..18) is_readable = 1 - now=2016-03-08 15:19:03.628018 lease_expire=0.000000 has v0 lc 18 2016-03-08 15:19:03.628023 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer: state = open policy.server=1 2016-03-08 15:19:03.628025 7f82a5352700 10 mon.a@0(leader).log v9 preprocess_query log(1 entries from seq 20 at 2016-03-08 15:19:03.627903) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.628031 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer sleeping 2016-03-08 15:19:03.628045 7f82a5352700 10 mon.a@0(leader).log v9 preprocess_log log(1 entries from seq 20 at 2016-03-08 15:19:03.627903) v1 from mon.0 2016-03-08 15:19:03.628048 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:03.628050 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:03.628051 7f82a5352700 20 allow all 2016-03-08 15:19:03.628068 7f82a5352700 10 mon.a@0(leader).log v9 prepare_update log(1 entries from seq 20 at 2016-03-08 15:19:03.627903) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.628077 7f82a5352700 10 mon.a@0(leader).log v9 prepare_log log(1 entries from seq 20 at 2016-03-08 15:19:03.627903) v1 from mon.0 2016-03-08 15:19:03.628080 7f82a5352700 10 mon.a@0(leader).log v9 logging 2016-03-08 15:19:03.627903 mon.0 127.0.0.1:7104/0 20 : audit [DBG] from='client.? 
127.0.0.1:0/1824291270' entity='client.bootstrap-osd' cmd=[{"prefix": "mon getmap"}]: dispatch 2016-03-08 15:19:03.628088 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..9) setting proposal_timer 0x559cd1494530 with delay of 0.045325 2016-03-08 15:19:03.628099 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1563700 2016-03-08 15:19:03.629550 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader couldn't read tag, (0) Success 2016-03-08 15:19:03.629566 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).fault (0) Success 2016-03-08 15:19:03.629597 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).fault on lossy channel, failing 2016-03-08 15:19:03.629602 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfc00).stop 2016-03-08 15:19:03.629608 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd18dfc00).unregister_pipe 2016-03-08 15:19:03.629611 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd18dfc00).discard_queue 2016-03-08 15:19:03.629618 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd18dfc00).reader done 2016-03-08 15:19:03.629621 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer finishing 2016-03-08 15:19:03.629624 7f82a5352700 10 mon.a@0(leader) e1 ms_handle_reset 0x559cd18dfc00 127.0.0.1:0/1824291270 2016-03-08 15:19:03.629630 7f82a334e700 10 -- 127.0.0.1:7104/0 queue_reap 0x559cd17aa000 2016-03-08 15:19:03.629635 7f82a5352700 10 mon.a@0(leader) e1 reset/close on session client.? 127.0.0.1:0/1824291270 2016-03-08 15:19:03.629638 7f82a5352700 10 mon.a@0(leader) e1 remove_session 0x559cd1857000 client.? 
127.0.0.1:0/1824291270 2016-03-08 15:19:03.629638 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd18dfc00).writer done 2016-03-08 15:19:03.629642 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper 2016-03-08 15:19:03.629644 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper reaping pipe 0x559cd17aa000 127.0.0.1:0/1824291270 2016-03-08 15:19:03.629646 7f82a6354700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd18dfc00).discard_queue 2016-03-08 15:19:03.629649 7f82a6354700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd18dfc00).unregister_pipe - not registered 2016-03-08 15:19:03.629652 7f82a6354700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1824291270 pipe(0x559cd17aa000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd18dfc00).join 2016-03-08 15:19:03.629678 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper reaped pipe 0x559cd17aa000 127.0.0.1:0/1824291270 2016-03-08 15:19:03.629685 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper deleted pipe 0x559cd17aa000 2016-03-08 15:19:03.629687 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper done 2016-03-08 15:19:03.673501 7f82a5b53700 10 mon.a@0(leader).paxosservice(logm 1..9) propose_pending 2016-03-08 15:19:03.673515 7f82a5b53700 10 mon.a@0(leader).log v9 encode_full log v 9 2016-03-08 15:19:03.673541 7f82a5b53700 10 mon.a@0(leader).log v9 encode_pending v10 2016-03-08 15:19:03.673551 7f82a5b53700 5 mon.a@0(leader).paxos(paxos active c 1..18) queue_pending_finisher 0x559cd1494390 2016-03-08 15:19:03.673553 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..18) trigger_propose active, proposing now 2016-03-08 15:19:03.673558 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..18) propose_pending 19 5734 bytes 2016-03-08 15:19:03.673559 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..18) begin for 19 5734 bytes 2016-03-08 15:19:03.701518 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..18) commit_start 19 2016-03-08 15:19:03.723856 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..18) commit_finish 19 2016-03-08 15:19:03.723889 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:03.723905 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..5) refresh 2016-03-08 15:19:03.723918 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:03.723925 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..4) refresh 2016-03-08 15:19:03.723933 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..10) refresh 2016-03-08 15:19:03.723935 7f82a6b55700 10 mon.a@0(leader).log v10 update_from_paxos 2016-03-08 15:19:03.723936 7f82a6b55700 10 mon.a@0(leader).log v10 update_from_paxos version 10 summary v 9 2016-03-08 15:19:03.723941 7f82a6b55700 10 mon.a@0(leader).log v10 update_from_paxos latest full 9 2016-03-08 15:19:03.723948 7f82a6b55700 7 mon.a@0(leader).log v10 update_from_paxos applying incremental log 10 2016-03-08 15:19:03.627903 mon.0 127.0.0.1:7104/0 20 : audit [DBG] from='client.? 
127.0.0.1:0/1824291270' entity='client.bootstrap-osd' cmd=[{"prefix": "mon getmap"}]: dispatch 2016-03-08 15:19:03.723972 7f82a6b55700 20 mon.a@0(leader).log v10 update_from_paxos logging for channel 'audit' to file 'testdir/osd-crush/log' 2016-03-08 15:19:03.723975 7f82a6b55700 15 mon.a@0(leader).log v10 update_from_paxos logging for 0 channels 2016-03-08 15:19:03.723977 7f82a6b55700 10 mon.a@0(leader).log v10 check_subs 2016-03-08 15:19:03.723987 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:03.723994 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) refresh 2016-03-08 15:19:03.723995 7f82a6b55700 10 mon.a@0(leader).auth v2 update_from_paxos 2016-03-08 15:19:03.723997 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..5) post_refresh 2016-03-08 15:19:03.723998 7f82a6b55700 10 mon.a@0(leader).pg v5 post_paxos_update 2016-03-08 15:19:03.723999 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:03.724000 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..4) post_refresh 2016-03-08 15:19:03.724001 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..10) post_refresh 2016-03-08 15:19:03.724001 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:03.724002 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..2) post_refresh 2016-03-08 15:19:03.724003 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..19) commit_proposal 2016-03-08 15:19:03.724005 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..10) _active - not active 2016-03-08 15:19:03.724007 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..19) finish_round 2016-03-08 15:19:03.724008 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..19) finish_round waiting_for_acting 2016-03-08 15:19:03.724009 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..10) _active 2016-03-08 15:19:03.724010 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..10) remove_legacy_versions 2016-03-08 15:19:03.724014 7f82a6b55700 7 mon.a@0(leader).paxosservice(logm 1..10) _active creating new pending 2016-03-08 15:19:03.724019 7f82a6b55700 10 mon.a@0(leader).log v10 create_pending v 11 2016-03-08 15:19:03.724030 7f82a6b55700 7 mon.a@0(leader).log v10 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.724037 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd191c240 0x559cd1555200 log(last 20) v1 2016-03-08 15:19:03.724040 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 20) v1 -- ?+0 0x559cd1555200 con 0x559cd1477080 2016-03-08 15:19:03.724043 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 20) v1 local 2016-03-08 15:19:03.724064 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..19) finish_round waiting_for_readable 2016-03-08 15:19:03.724067 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..19) finish_round waiting_for_writeable 2016-03-08 15:19:03.724068 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..19) finish_round done w/ waiters, state 1 2016-03-08 15:19:03.724066 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 20) v1 ==== 0+0+0 (0 0 0) 0x559cd1555200 con 0x559cd1477080 2016-03-08 15:19:03.724092 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:03.724096 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:03.724100 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:03.724102 7f82a5352700 20 allow so far , 
doing grant allow * 2016-03-08 15:19:03.724103 7f82a5352700 20 allow all 2016-03-08 15:19:03.724115 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1555200 2016-03-08 15:19:04.077872 7f82a4350700 20 accepter.accepter poll got 1 2016-03-08 15:19:04.077884 7f82a4350700 10 accepter.pfd.revents=1 2016-03-08 15:19:04.077892 7f82a4350700 10 accepter.accepted incoming on sd 21 2016-03-08 15:19:04.077922 7f82a4350700 20 accepter.accepter calling poll 2016-03-08 15:19:04.077937 7f82aea26700 10 -- 127.0.0.1:7104/0 >> :/0 pipe(0x559cd17ab400 sd=21 :0 s=0 pgs=0 cs=0 l=0 c=0x559cd18dfd80).accept 2016-03-08 15:19:04.078011 7f82aea26700 1 -- 127.0.0.1:7104/0 >> :/0 pipe(0x559cd17ab400 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd18dfd80).accept sd=21 127.0.0.1:52226/0 2016-03-08 15:19:04.078084 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd18dfd80).accept peer addr is 127.0.0.1:0/2362445327 2016-03-08 15:19:04.078096 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd18dfd80).accept got peer connect_seq 0 global_seq 1 2016-03-08 15:19:04.078099 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd18dfd80).accept of host_type 8, policy.lossy=1 policy.server=1 policy.standby=0 policy.resetcheck=0 2016-03-08 15:19:04.078102 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd18dfd80).accept my proto 15, their proto 15 2016-03-08 15:19:04.078105 7f82aea26700 10 mon.a@0(leader) e1 ms_verify_authorizer 127.0.0.1:0/2362445327 client protocol 0 2016-03-08 15:19:04.078109 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd18dfd80).accept: setting up session_security. 2016-03-08 15:19:04.078111 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd18dfd80).accept new session 2016-03-08 15:19:04.078114 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).accept success, connect_seq = 1, sending READY 2016-03-08 15:19:04.078116 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).accept features 576460752303423487 2016-03-08 15:19:04.078123 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).register_pipe 2016-03-08 15:19:04.078137 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).discard_requeued_up_to 0 2016-03-08 15:19:04.078140 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).accept starting writer, state open 2016-03-08 15:19:04.078156 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).accept done 2016-03-08 15:19:04.078161 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader reading tag... 
2016-03-08 15:19:04.078176 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got KEEPALIVE2 2016-03-08 15:19:04.078163 2016-03-08 15:19:04.078188 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader reading tag... 2016-03-08 15:19:04.078193 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer: state = open policy.server=1 2016-03-08 15:19:04.078205 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got MSG 2016-03-08 15:19:04.078208 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got envelope type=17 src client.? front=68 data=0 off 0 2016-03-08 15:19:04.078204 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).write_keepalive2 15 2016-03-08 15:19:04.078163 2016-03-08 15:19:04.078211 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader wants 68 bytes from policy throttler 0/104857600 2016-03-08 15:19:04.078215 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader wants 68 from dispatch throttler 0/104857600 2016-03-08 15:19:04.078219 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got front 68 2016-03-08 15:19:04.078221 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).aborted = 0 2016-03-08 15:19:04.078224 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got 68 + 0 + 0 byte message 2016-03-08 15:19:04.078229 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer: state = open policy.server=1 2016-03-08 15:19:04.078233 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer sleeping 2016-03-08 15:19:04.078242 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got message 1 0x559cd1561e00 auth(proto 0 38 bytes epoch 0) v1 2016-03-08 15:19:04.078247 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1561e00 prio 127 2016-03-08 15:19:04.078253 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader reading tag... 2016-03-08 15:19:04.078256 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer: state = open policy.server=1 2016-03-08 15:19:04.078262 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).write_ack 1 2016-03-08 15:19:04.078258 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.? 
127.0.0.1:0/2362445327 1 ==== auth(proto 0 38 bytes epoch 0) v1 ==== 68+0+0 (424667196 0 0) 0x559cd1561e00 con 0x559cd18dfd80 2016-03-08 15:19:04.078277 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer: state = open policy.server=1 2016-03-08 15:19:04.078281 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer sleeping 2016-03-08 15:19:04.078293 7f82a5352700 10 mon.a@0(leader) e1 _ms_dispatch new session 0x559cd1857200 MonSession(client.? 127.0.0.1:0/2362445327 is open) 2016-03-08 15:19:04.078298 7f82a5352700 20 mon.a@0(leader) e1 caps 2016-03-08 15:19:04.078304 7f82a5352700 10 mon.a@0(leader).paxosservice(auth 1..2) dispatch 0x559cd1561e00 auth(proto 0 38 bytes epoch 0) v1 from client.? 127.0.0.1:0/2362445327 con 0x559cd18dfd80 2016-03-08 15:19:04.078309 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..19) is_readable = 1 - now=2016-03-08 15:19:04.078309 lease_expire=0.000000 has v0 lc 19 2016-03-08 15:19:04.078315 7f82a5352700 10 mon.a@0(leader).auth v2 preprocess_query auth(proto 0 38 bytes epoch 0) v1 from client.? 127.0.0.1:0/2362445327 2016-03-08 15:19:04.078318 7f82a5352700 10 mon.a@0(leader).auth v2 prep_auth() blob_size=38 2016-03-08 15:19:04.078324 7f82a5352700 10 mon.a@0(leader).auth v2 AuthMonitor::assign_global_id m=auth(proto 0 38 bytes epoch 0) v1 mon=0/1 last_allocated=4100 max_global_id=14096 2016-03-08 15:19:04.078326 7f82a5352700 10 mon.a@0(leader).auth v2 next_global_id should be 4101 2016-03-08 15:19:04.078330 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2362445327 -- mon_map magic: 0 v1 -- ?+0 0x559cd1555200 con 0x559cd18dfd80 2016-03-08 15:19:04.078333 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_map magic: 0 v1 remote, 127.0.0.1:0/2362445327, have pipe. 2016-03-08 15:19:04.078340 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer: state = open policy.server=1 2016-03-08 15:19:04.078348 7f82a5352700 2 mon.a@0(leader) e1 send_reply 0x559cd191c900 0x559cd1561900 auth_reply(proto 1 0 (0) Success) v1 2016-03-08 15:19:04.078350 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer encoding 1 features 576460752303423487 0x559cd1555200 mon_map magic: 0 v1 2016-03-08 15:19:04.078356 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2362445327 -- auth_reply(proto 1 0 (0) Success) v1 -- ?+0 0x559cd1561900 con 0x559cd18dfd80 2016-03-08 15:19:04.078362 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer signed seq # 1): sig = 0 2016-03-08 15:19:04.078369 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer sending 1 0x559cd1555200 2016-03-08 15:19:04.078372 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message auth_reply(proto 1 0 (0) Success) v1 remote, 127.0.0.1:0/2362445327, have pipe. 
2016-03-08 15:19:04.078394 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 68 to dispatch throttler 68/104857600 2016-03-08 15:19:04.078393 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer: state = open policy.server=1 2016-03-08 15:19:04.078400 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1561e00 2016-03-08 15:19:04.078404 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer encoding 2 features 576460752303423487 0x559cd1561900 auth_reply(proto 1 0 (0) Success) v1 2016-03-08 15:19:04.078415 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer signed seq # 2): sig = 0 2016-03-08 15:19:04.078417 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer sending 2 0x559cd1561900 2016-03-08 15:19:04.078430 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer: state = open policy.server=1 2016-03-08 15:19:04.078434 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer sleeping 2016-03-08 15:19:04.078523 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got ACK 2016-03-08 15:19:04.078533 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got ack seq 1 2016-03-08 15:19:04.078536 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader reading tag... 2016-03-08 15:19:04.078538 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got ACK 2016-03-08 15:19:04.078540 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got ack seq 2 2016-03-08 15:19:04.078542 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader reading tag... 2016-03-08 15:19:04.078544 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got MSG 2016-03-08 15:19:04.078546 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got envelope type=15 src client.? 
front=23 data=0 off 0 2016-03-08 15:19:04.078549 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader wants 23 bytes from policy throttler 0/104857600 2016-03-08 15:19:04.078552 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader wants 23 from dispatch throttler 0/104857600 2016-03-08 15:19:04.078556 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got front 23 2016-03-08 15:19:04.078558 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).aborted = 0 2016-03-08 15:19:04.078560 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got 23 + 0 + 0 byte message 2016-03-08 15:19:04.078568 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got message 2 0x559cd1857800 mon_subscribe({monmap=0+}) v2 2016-03-08 15:19:04.078572 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1857800 prio 127 2016-03-08 15:19:04.078576 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader reading tag... 2016-03-08 15:19:04.078578 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer: state = open policy.server=1 2016-03-08 15:19:04.078580 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.? 127.0.0.1:0/2362445327 2 ==== mon_subscribe({monmap=0+}) v2 ==== 23+0+0 (1620593354 0 0) 0x559cd1857800 con 0x559cd18dfd80 2016-03-08 15:19:04.078585 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).write_ack 2 2016-03-08 15:19:04.078595 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got MSG 2016-03-08 15:19:04.078599 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer: state = open policy.server=1 2016-03-08 15:19:04.078601 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got envelope type=15 src client.4101 front=23 data=0 off 0 2016-03-08 15:19:04.078602 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1857200 for client.? 
127.0.0.1:0/2362445327 2016-03-08 15:19:04.078603 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer sleeping 2016-03-08 15:19:04.078609 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.078604 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader wants 23 bytes from policy throttler 23/104857600 2016-03-08 15:19:04.078611 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader wants 23 from dispatch throttler 23/104857600 2016-03-08 15:19:04.078613 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.078614 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.078615 7f82a5352700 20 allow all 2016-03-08 15:19:04.078614 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got front 23 2016-03-08 15:19:04.078616 7f82a5352700 10 mon.a@0(leader) e1 handle_subscribe mon_subscribe({monmap=0+}) v2 2016-03-08 15:19:04.078616 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).aborted = 0 2016-03-08 15:19:04.078618 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got 23 + 0 + 0 byte message 2016-03-08 15:19:04.078621 7f82a5352700 10 mon.a@0(leader) e1 check_sub monmap next 0 have 1 2016-03-08 15:19:04.078623 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got message 3 0x559cd1857a00 mon_subscribe({osdmap=0}) v2 2016-03-08 15:19:04.078624 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2362445327 -- mon_map magic: 0 v1 -- ?+0 0x559cd15546c0 con 0x559cd18dfd80 2016-03-08 15:19:04.078626 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1857a00 prio 127 2016-03-08 15:19:04.078630 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader reading tag... 2016-03-08 15:19:04.078632 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer: state = open policy.server=1 2016-03-08 15:19:04.078639 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_map magic: 0 v1 remote, 127.0.0.1:0/2362445327, have pipe. 
2016-03-08 15:19:04.078638 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).write_ack 3 2016-03-08 15:19:04.078647 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 23 to dispatch throttler 46/104857600 2016-03-08 15:19:04.078649 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1857800 2016-03-08 15:19:04.078645 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer encoding 3 features 576460752303423487 0x559cd15546c0 mon_map magic: 0 v1 2016-03-08 15:19:04.078652 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4101 127.0.0.1:0/2362445327 3 ==== mon_subscribe({osdmap=0}) v2 ==== 23+0+0 (319581110 0 0) 0x559cd1857a00 con 0x559cd18dfd80 2016-03-08 15:19:04.078656 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer signed seq # 3): sig = 0 2016-03-08 15:19:04.078661 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer sending 3 0x559cd15546c0 2016-03-08 15:19:04.078668 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1857200 for client.? 127.0.0.1:0/2362445327 2016-03-08 15:19:04.078670 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.078673 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.078674 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.078674 7f82a5352700 20 allow all 2016-03-08 15:19:04.078675 7f82a5352700 10 mon.a@0(leader) e1 handle_subscribe mon_subscribe({osdmap=0}) v2 2016-03-08 15:19:04.078677 7f82a5352700 20 is_capable service=osd command= read on cap allow * 2016-03-08 15:19:04.078677 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.078678 7f82a5352700 20 allow all 2016-03-08 15:19:04.078679 7f82a5352700 10 mon.a@0(leader).osd e4 check_sub 0x559cd188a960 next 0 (onetime) 2016-03-08 15:19:04.078683 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2362445327 -- osd_map(4..4 src has 1..4) v3 -- ?+0 0x559cd1561e00 con 0x559cd18dfd80 2016-03-08 15:19:04.078682 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer: state = open policy.server=1 2016-03-08 15:19:04.078687 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer sleeping 2016-03-08 15:19:04.078695 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message osd_map(4..4 src has 1..4) v3 remote, 127.0.0.1:0/2362445327, have pipe. 
2016-03-08 15:19:04.078704 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer: state = open policy.server=1 2016-03-08 15:19:04.078709 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 23 to dispatch throttler 23/104857600 2016-03-08 15:19:04.078712 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1857a00 2016-03-08 15:19:04.078709 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer encoding 4 features 576460752303423487 0x559cd1561e00 osd_map(4..4 src has 1..4) v3 2016-03-08 15:19:04.078719 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer signed seq # 4): sig = 0 2016-03-08 15:19:04.078731 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer sending 4 0x559cd1561e00 2016-03-08 15:19:04.078759 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer: state = open policy.server=1 2016-03-08 15:19:04.078763 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer sleeping 2016-03-08 15:19:04.080378 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got ACK 2016-03-08 15:19:04.080385 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got ack seq 3 2016-03-08 15:19:04.080388 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader reading tag... 2016-03-08 15:19:04.080390 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got ACK 2016-03-08 15:19:04.080392 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got ack seq 4 2016-03-08 15:19:04.080393 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader reading tag... 
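At this point the newly connected client (assigned global_id 4101 above) has finished its subscriptions: it asked for monmap=0+ and osdmap=0, and the monitor answered with its monmap (epoch 1, per "check_sub monmap next 0 have 1") and with osd_map(4..4), i.e. osdmap epoch 4. As a rough cross-check of the same state from a shell, one could query the monitor at the address that appears throughout this log; auth is disabled in this run (the auth_reply(proto 1 ...) above corresponds to AUTH_NONE), so no keyring should be needed. This is a sketch, not part of the test script:

    # the first line of 'ceph osd dump' reports the osdmap epoch, which should
    # match the osd_map(4..4) the monitor just pushed
    ceph -m 127.0.0.1:7104 osd dump | head -1
    # likewise the monmap epoch (1 here)
    ceph -m 127.0.0.1:7104 mon dump | head -1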
2016-03-08 15:19:04.080395 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got MSG 2016-03-08 15:19:04.080397 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got envelope type=50 src client.4101 front=80 data=0 off 0 2016-03-08 15:19:04.080400 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader wants 80 bytes from policy throttler 0/104857600 2016-03-08 15:19:04.080403 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader wants 80 from dispatch throttler 0/104857600 2016-03-08 15:19:04.080406 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got front 80 2016-03-08 15:19:04.080408 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).aborted = 0 2016-03-08 15:19:04.080410 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got 80 + 0 + 0 byte message 2016-03-08 15:19:04.080417 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got message 4 0x559cd1555200 mon_command({"prefix": "get_command_descriptions"} v 0) v1 2016-03-08 15:19:04.080420 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1555200 prio 127 2016-03-08 15:19:04.080425 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader reading tag... 2016-03-08 15:19:04.080428 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4101 127.0.0.1:0/2362445327 4 ==== mon_command({"prefix": "get_command_descriptions"} v 0) v1 ==== 80+0+0 (3134233619 0 0) 0x559cd1555200 con 0x559cd18dfd80 2016-03-08 15:19:04.080428 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer: state = open policy.server=1 2016-03-08 15:19:04.080438 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).write_ack 4 2016-03-08 15:19:04.080447 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer: state = open policy.server=1 2016-03-08 15:19:04.080452 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer sleeping 2016-03-08 15:19:04.080462 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1857200 for client.? 
127.0.0.1:0/2362445327 2016-03-08 15:19:04.080465 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.081515 7f82a5352700 2 mon.a@0(leader) e1 send_reply 0x559cd191d0e0 0x559cd1556640 mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 2016-03-08 15:19:04.081517 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2362445327 -- mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 -- ?+37345 0x559cd1556640 con 0x559cd18dfd80 2016-03-08 15:19:04.081521 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 remote, 127.0.0.1:0/2362445327, have pipe. 2016-03-08 15:19:04.081532 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 80 to dispatch throttler 80/104857600 2016-03-08 15:19:04.081534 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1555200 2016-03-08 15:19:04.081533 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer: state = open policy.server=1 2016-03-08 15:19:04.081545 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer encoding 5 features 576460752303423487 0x559cd1556640 mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 2016-03-08 15:19:04.081560 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer signed seq # 5): sig = 0 2016-03-08 15:19:04.081566 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer sending 5 0x559cd1556640 2016-03-08 15:19:04.081605 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer: state = open policy.server=1 2016-03-08 15:19:04.081611 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer sleeping 2016-03-08 15:19:04.128565 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got ACK 2016-03-08 15:19:04.128582 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got ack seq 5 2016-03-08 15:19:04.128595 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader reading tag... 
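The mon_command({"prefix": "get_command_descriptions"}) handled above, together with the roughly 37 KB mon_command_ack that answers it (?+37345), is the command-table fetch the ceph CLI performs before dispatching the caller's real command. A hedged way to observe the client side of the same round trip is to run any command against this monitor with messenger debugging turned up; the exact client-side log text is version-dependent, so treat the grep pattern as indicative only:

    # any CLI invocation should first exchange get_command_descriptions with the mon;
    # --debug-ms 1 makes the client print the messages it sends and receives
    ceph -m 127.0.0.1:7104 --debug-ms 1 -s 2>&1 | grep command_descriptions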
2016-03-08 15:19:04.128597 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got MSG 2016-03-08 15:19:04.128599 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got envelope type=50 src client.4101 front=139 data=56 off 0 2016-03-08 15:19:04.128603 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader wants 195 bytes from policy throttler 0/104857600 2016-03-08 15:19:04.128609 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader wants 195 from dispatch throttler 0/104857600 2016-03-08 15:19:04.128613 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got front 139 2016-03-08 15:19:04.128616 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader allocating new rx buffer at offset 0 2016-03-08 15:19:04.128620 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader reading nonblocking into 0x559cd14a9720 len 56 2016-03-08 15:19:04.128623 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).aborted = 0 2016-03-08 15:19:04.128625 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got 139 + 0 + 56 byte message 2016-03-08 15:19:04.128635 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader got message 5 0x559cd15546c0 mon_command({"prefix": "auth add", "entity": "osd.0", "caps": ["osd", "allow *", "mon", "allow profile osd"]} v 0) v1 2016-03-08 15:19:04.128639 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd15546c0 prio 127 2016-03-08 15:19:04.128645 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader reading tag... 2016-03-08 15:19:04.128658 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer: state = open policy.server=1 2016-03-08 15:19:04.128664 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4101 127.0.0.1:0/2362445327 5 ==== mon_command({"prefix": "auth add", "entity": "osd.0", "caps": ["osd", "allow *", "mon", "allow profile osd"]} v 0) v1 ==== 139+0+56 (55485594 0 1196323015) 0x559cd15546c0 con 0x559cd18dfd80 2016-03-08 15:19:04.128668 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).write_ack 5 2016-03-08 15:19:04.128686 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer: state = open policy.server=1 2016-03-08 15:19:04.128690 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer sleeping 2016-03-08 15:19:04.128705 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1857200 for client.? 
127.0.0.1:0/2362445327 2016-03-08 15:19:04.128709 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.128748 7f82a5352700 0 mon.a@0(leader) e1 handle_command mon_command({"prefix": "auth add", "entity": "osd.0", "caps": ["osd", "allow *", "mon", "allow profile osd"]} v 0) v1 2016-03-08 15:19:04.128772 7f82a5352700 20 is_capable service=auth command=auth add read write exec on cap allow * 2016-03-08 15:19:04.128774 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.128775 7f82a5352700 20 allow all 2016-03-08 15:19:04.128776 7f82a5352700 10 mon.a@0(leader) e1 _allowed_command capable 2016-03-08 15:19:04.128783 7f82a5352700 0 log_channel(audit) log [INF] : from='client.? 127.0.0.1:0/2362445327' entity='client.bootstrap-osd' cmd=[{"prefix": "auth add", "entity": "osd.0", "caps": ["osd", "allow *", "mon", "allow profile osd"]}]: dispatch 2016-03-08 15:19:04.128868 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 21 at 2016-03-08 15:19:04.128784) v1 -- ?+0 0x559cd1562580 con 0x559cd1477080 2016-03-08 15:19:04.128873 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 21 at 2016-03-08 15:19:04.128784) v1 local 2016-03-08 15:19:04.128881 7f82a5352700 10 mon.a@0(leader).paxosservice(auth 1..2) dispatch 0x559cd15546c0 mon_command({"prefix": "auth add", "entity": "osd.0", "caps": ["osd", "allow *", "mon", "allow profile osd"]} v 0) v1 from client.4101 127.0.0.1:0/2362445327 con 0x559cd18dfd80 2016-03-08 15:19:04.128889 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..19) is_readable = 1 - now=2016-03-08 15:19:04.128889 lease_expire=0.000000 has v0 lc 19 2016-03-08 15:19:04.128894 7f82a5352700 10 mon.a@0(leader).auth v2 preprocess_query mon_command({"prefix": "auth add", "entity": "osd.0", "caps": ["osd", "allow *", "mon", "allow profile osd"]} v 0) v1 from client.4101 127.0.0.1:0/2362445327 2016-03-08 15:19:04.128911 7f82a5352700 10 mon.a@0(leader).auth v2 prepare_update mon_command({"prefix": "auth add", "entity": "osd.0", "caps": ["osd", "allow *", "mon", "allow profile osd"]} v 0) v1 from client.4101 127.0.0.1:0/2362445327 2016-03-08 15:19:04.129002 7f82a5352700 10 mon.a@0(leader).auth v2 importing osd.0 2016-03-08 15:19:04.129030 7f82a5352700 10 mon.a@0(leader).paxosservice(auth 1..2) propose_pending 2016-03-08 15:19:04.129054 7f82a5352700 10 mon.a@0(leader).auth v2 encode_pending v 3 2016-03-08 15:19:04.129063 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..19) queue_pending_finisher 0x559cd1494540 2016-03-08 15:19:04.129065 7f82a5352700 10 mon.a@0(leader).paxos(paxos active c 1..19) trigger_propose active, proposing now 2016-03-08 15:19:04.129074 7f82a5352700 10 mon.a@0(leader).paxos(paxos active c 1..19) propose_pending 20 280 bytes 2016-03-08 15:19:04.129077 7f82a5352700 10 mon.a@0(leader).paxos(paxos updating c 1..19) begin for 20 280 bytes 2016-03-08 15:19:04.136932 7f82a5352700 10 mon.a@0(leader).paxos(paxos updating c 1..19) commit_start 20 2016-03-08 15:19:04.137005 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 195 to dispatch throttler 195/104857600 2016-03-08 15:19:04.137013 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd15546c0 2016-03-08 15:19:04.137019 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 21 at 2016-03-08 15:19:04.128784) v1 ==== 0+0+0 (0 0 0) 0x559cd1562580 con 0x559cd1477080 2016-03-08 15:19:04.137053 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for 
mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.137058 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.137074 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..10) dispatch 0x559cd1562580 log(1 entries from seq 21 at 2016-03-08 15:19:04.128784) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:04.137080 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 1..19) is_readable = 1 - now=2016-03-08 15:19:04.137080 lease_expire=0.000000 has v0 lc 19 2016-03-08 15:19:04.137101 7f82a5352700 10 mon.a@0(leader).log v10 preprocess_query log(1 entries from seq 21 at 2016-03-08 15:19:04.128784) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.137109 7f82a5352700 10 mon.a@0(leader).log v10 preprocess_log log(1 entries from seq 21 at 2016-03-08 15:19:04.128784) v1 from mon.0 2016-03-08 15:19:04.137112 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:04.137114 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.137115 7f82a5352700 20 allow all 2016-03-08 15:19:04.137122 7f82a5352700 10 mon.a@0(leader).log v10 prepare_update log(1 entries from seq 21 at 2016-03-08 15:19:04.128784) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.137129 7f82a5352700 10 mon.a@0(leader).log v10 prepare_log log(1 entries from seq 21 at 2016-03-08 15:19:04.128784) v1 from mon.0 2016-03-08 15:19:04.137132 7f82a5352700 10 mon.a@0(leader).log v10 logging 2016-03-08 15:19:04.128784 mon.0 127.0.0.1:7104/0 21 : audit [INF] from='client.? 127.0.0.1:0/2362445327' entity='client.bootstrap-osd' cmd=[{"prefix": "auth add", "entity": "osd.0", "caps": ["osd", "allow *", "mon", "allow profile osd"]}]: dispatch 2016-03-08 15:19:04.137141 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..10) setting proposal_timer 0x559cd14945a0 with delay of 0.05 2016-03-08 15:19:04.137152 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1562580 2016-03-08 15:19:04.143429 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..19) commit_finish 20 2016-03-08 15:19:04.143455 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:04.143470 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..5) refresh 2016-03-08 15:19:04.143479 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:04.143487 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..4) refresh 2016-03-08 15:19:04.143496 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..10) refresh 2016-03-08 15:19:04.143498 7f82a6b55700 10 mon.a@0(leader).log v10 update_from_paxos 2016-03-08 15:19:04.143500 7f82a6b55700 10 mon.a@0(leader).log v10 update_from_paxos version 10 summary v 10 2016-03-08 15:19:04.143509 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:04.143517 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) refresh 2016-03-08 15:19:04.143522 7f82a6b55700 10 mon.a@0(leader).auth v3 update_from_paxos 2016-03-08 15:19:04.143527 7f82a6b55700 10 mon.a@0(leader).auth v3 update_from_paxos version 3 keys ver 2 latest 1 2016-03-08 15:19:04.143529 7f82a6b55700 10 mon.a@0(leader).auth v3 update_from_paxos key server version 2 2016-03-08 15:19:04.143535 7f82a6b55700 20 mon.a@0(leader).auth v3 update_from_paxos walking through version 3 len 128 2016-03-08 15:19:04.143572 7f82a6b55700 10 mon.a@0(leader).auth v3 update_from_paxos() last_allocated_id=4101 max_global_id=14096 format_version 1 2016-03-08 15:19:04.143575 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..5) post_refresh 2016-03-08 15:19:04.143576 
7f82a6b55700 10 mon.a@0(leader).pg v5 post_paxos_update 2016-03-08 15:19:04.143578 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:04.143578 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..4) post_refresh 2016-03-08 15:19:04.143579 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..10) post_refresh 2016-03-08 15:19:04.143580 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:04.143580 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) post_refresh 2016-03-08 15:19:04.143581 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..20) commit_proposal 2016-03-08 15:19:04.143584 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) _active - not active 2016-03-08 15:19:04.143585 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..20) finish_round 2016-03-08 15:19:04.143586 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..20) finish_round waiting_for_acting 2016-03-08 15:19:04.143587 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) _active 2016-03-08 15:19:04.143588 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) remove_legacy_versions 2016-03-08 15:19:04.143593 7f82a6b55700 7 mon.a@0(leader).paxosservice(auth 1..3) _active creating new pending 2016-03-08 15:19:04.143595 7f82a6b55700 10 mon.a@0(leader).auth v3 create_pending v 4 2016-03-08 15:19:04.143612 7f82a6b55700 0 log_channel(audit) log [INF] : from='client.? 127.0.0.1:0/2362445327' entity='client.bootstrap-osd' cmd='[{"prefix": "auth add", "entity": "osd.0", "caps": ["osd", "allow *", "mon", "allow profile osd"]}]': finished 2016-03-08 15:19:04.143625 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 22 at 2016-03-08 15:19:04.143613) v1 -- ?+0 0x559cd17cca80 con 0x559cd1477080 2016-03-08 15:19:04.143632 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 22 at 2016-03-08 15:19:04.143613) v1 local 2016-03-08 15:19:04.143641 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd191d320 0x559cd1557180 mon_command_ack([{"prefix": "auth add", "entity": "osd.0", "caps": ["osd", "allow *", "mon", "allow profile osd"]}]=0 added key for osd.0 v3) v1 2016-03-08 15:19:04.143645 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2362445327 -- mon_command_ack([{"prefix": "auth add", "entity": "osd.0", "caps": ["osd", "allow *", "mon", "allow profile osd"]}]=0 added key for osd.0 v3) v1 -- ?+0 0x559cd1557180 con 0x559cd18dfd80 2016-03-08 15:19:04.143648 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message mon_command_ack([{"prefix": "auth add", "entity": "osd.0", "caps": ["osd", "allow *", "mon", "allow profile osd"]}]=0 added key for osd.0 v3) v1 remote, 127.0.0.1:0/2362445327, have pipe. 
2016-03-08 15:19:04.143665 7f82a6b55700 20 mon.a@0(leader).auth v3 upgrade_format format 1 is current 2016-03-08 15:19:04.143668 7f82a6b55700 10 mon.a@0(leader).auth v3 AuthMonitor::on_active() 2016-03-08 15:19:04.143659 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer: state = open policy.server=1 2016-03-08 15:19:04.143671 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..20) finish_round waiting_for_readable 2016-03-08 15:19:04.143673 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..20) finish_round waiting_for_writeable 2016-03-08 15:19:04.143674 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..20) finish_round done w/ waiters, state 1 2016-03-08 15:19:04.143676 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer encoding 6 features 576460752303423487 0x559cd1557180 mon_command_ack([{"prefix": "auth add", "entity": "osd.0", "caps": ["osd", "allow *", "mon", "allow profile osd"]}]=0 added key for osd.0 v3) v1 2016-03-08 15:19:04.143682 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 22 at 2016-03-08 15:19:04.143613) v1 ==== 0+0+0 (0 0 0) 0x559cd17cca80 con 0x559cd1477080 2016-03-08 15:19:04.143691 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer signed seq # 6): sig = 0 2016-03-08 15:19:04.143697 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer sending 6 0x559cd1557180 2016-03-08 15:19:04.143727 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.143732 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.143744 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..10) dispatch 0x559cd17cca80 log(1 entries from seq 22 at 2016-03-08 15:19:04.143613) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:04.143749 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..20) is_readable = 1 - now=2016-03-08 15:19:04.143750 lease_expire=0.000000 has v0 lc 20 2016-03-08 15:19:04.143749 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer: state = open policy.server=1 2016-03-08 15:19:04.143755 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer sleeping 2016-03-08 15:19:04.143756 7f82a5352700 10 mon.a@0(leader).log v10 preprocess_query log(1 entries from seq 22 at 2016-03-08 15:19:04.143613) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.143767 7f82a5352700 10 mon.a@0(leader).log v10 preprocess_log log(1 entries from seq 22 at 2016-03-08 15:19:04.143613) v1 from mon.0 2016-03-08 15:19:04.143770 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:04.143771 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.143772 7f82a5352700 20 allow all 2016-03-08 15:19:04.143777 7f82a5352700 10 mon.a@0(leader).log v10 prepare_update log(1 entries from seq 22 at 2016-03-08 15:19:04.143613) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.143786 7f82a5352700 10 mon.a@0(leader).log v10 prepare_log log(1 entries from seq 22 at 2016-03-08 15:19:04.143613) v1 from mon.0 
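The exchange above is the monitor committing the "auth add" for osd.0 through paxos (propose_pending 20, then commit_finish 20) and replying with mon_command_ack(... = 0 added key for osd.0). The JSON payload in the mon_command is what the CLI form of the command produces, and the 56-byte data section suggests the caller supplied the key from a keyring file; the audit line attributes the call to client.bootstrap-osd. A sketch of the equivalent invocation, with an illustrative keyring path that is not taken from this log:

    # CLI equivalent of the mon_command shown in the log; the -i path below is
    # illustrative only, and with auth disabled no client keyring is required
    ceph -m 127.0.0.1:7104 auth add osd.0 osd 'allow *' mon 'allow profile osd' \
        -i /path/to/osd.0.keyring
    # the new entity and its caps should then be visible with:
    ceph -m 127.0.0.1:7104 auth get osd.0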
2016-03-08 15:19:04.143788 7f82a5352700 10 mon.a@0(leader).log v10 logging 2016-03-08 15:19:04.143613 mon.0 127.0.0.1:7104/0 22 : audit [INF] from='client.? 127.0.0.1:0/2362445327' entity='client.bootstrap-osd' cmd='[{"prefix": "auth add", "entity": "osd.0", "caps": ["osd", "allow *", "mon", "allow profile osd"]}]': finished 2016-03-08 15:19:04.143798 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..10) proposal_timer already set 2016-03-08 15:19:04.143800 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd17cca80 2016-03-08 15:19:04.161288 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader couldn't read tag, (0) Success 2016-03-08 15:19:04.161316 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).fault (0) Success 2016-03-08 15:19:04.161349 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).fault on lossy channel, failing 2016-03-08 15:19:04.161354 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd18dfd80).stop 2016-03-08 15:19:04.161359 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd18dfd80).unregister_pipe 2016-03-08 15:19:04.161363 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd18dfd80).discard_queue 2016-03-08 15:19:04.161371 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd18dfd80).reader done 2016-03-08 15:19:04.161375 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer finishing 2016-03-08 15:19:04.161378 7f82a5352700 10 mon.a@0(leader) e1 ms_handle_reset 0x559cd18dfd80 127.0.0.1:0/2362445327 2016-03-08 15:19:04.161392 7f82a5352700 10 mon.a@0(leader) e1 reset/close on session client.? 127.0.0.1:0/2362445327 2016-03-08 15:19:04.161392 7f82a334e700 10 -- 127.0.0.1:7104/0 queue_reap 0x559cd17ab400 2016-03-08 15:19:04.161396 7f82a5352700 10 mon.a@0(leader) e1 remove_session 0x559cd1857200 client.? 
127.0.0.1:0/2362445327 2016-03-08 15:19:04.161399 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd18dfd80).writer done 2016-03-08 15:19:04.161407 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper 2016-03-08 15:19:04.161410 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper reaping pipe 0x559cd17ab400 127.0.0.1:0/2362445327 2016-03-08 15:19:04.161412 7f82a6354700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd18dfd80).discard_queue 2016-03-08 15:19:04.161416 7f82a6354700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd18dfd80).unregister_pipe - not registered 2016-03-08 15:19:04.161419 7f82a6354700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2362445327 pipe(0x559cd17ab400 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd18dfd80).join 2016-03-08 15:19:04.161468 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper reaped pipe 0x559cd17ab400 127.0.0.1:0/2362445327 2016-03-08 15:19:04.161486 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper deleted pipe 0x559cd17ab400 2016-03-08 15:19:04.161489 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper done 2016-03-08 15:19:04.187226 7f82a5b53700 10 mon.a@0(leader).paxosservice(logm 1..10) propose_pending 2016-03-08 15:19:04.187240 7f82a5b53700 10 mon.a@0(leader).log v10 encode_full log v 10 2016-03-08 15:19:04.187278 7f82a5b53700 10 mon.a@0(leader).log v10 encode_pending v11 2016-03-08 15:19:04.187283 7f82a5b53700 5 mon.a@0(leader).paxos(paxos active c 1..20) queue_pending_finisher 0x559cd1494530 2016-03-08 15:19:04.187285 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..20) trigger_propose active, proposing now 2016-03-08 15:19:04.187289 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..20) propose_pending 21 6465 bytes 2016-03-08 15:19:04.187291 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..20) begin for 21 6465 bytes 2016-03-08 15:19:04.193458 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..20) commit_start 21 2016-03-08 15:19:04.199981 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..20) commit_finish 21 2016-03-08 15:19:04.200024 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:04.200046 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..5) refresh 2016-03-08 15:19:04.200054 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:04.200060 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..4) refresh 2016-03-08 15:19:04.200066 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..11) refresh 2016-03-08 15:19:04.200068 7f82a6b55700 10 mon.a@0(leader).log v11 update_from_paxos 2016-03-08 15:19:04.200069 7f82a6b55700 10 mon.a@0(leader).log v11 update_from_paxos version 11 summary v 10 2016-03-08 15:19:04.200072 7f82a6b55700 10 mon.a@0(leader).log v11 update_from_paxos latest full 10 2016-03-08 15:19:04.200078 7f82a6b55700 7 mon.a@0(leader).log v11 update_from_paxos applying incremental log 11 2016-03-08 15:19:04.128784 mon.0 127.0.0.1:7104/0 21 : audit [INF] from='client.? 
127.0.0.1:0/2362445327' entity='client.bootstrap-osd' cmd=[{"prefix": "auth add", "entity": "osd.0", "caps": ["osd", "allow *", "mon", "allow profile osd"]}]: dispatch 2016-03-08 15:19:04.200092 7f82a6b55700 20 mon.a@0(leader).log v11 update_from_paxos logging for channel 'audit' to file 'testdir/osd-crush/log' 2016-03-08 15:19:04.200100 7f82a6b55700 7 mon.a@0(leader).log v11 update_from_paxos applying incremental log 11 2016-03-08 15:19:04.143613 mon.0 127.0.0.1:7104/0 22 : audit [INF] from='client.? 127.0.0.1:0/2362445327' entity='client.bootstrap-osd' cmd='[{"prefix": "auth add", "entity": "osd.0", "caps": ["osd", "allow *", "mon", "allow profile osd"]}]': finished 2016-03-08 15:19:04.200104 7f82a6b55700 20 mon.a@0(leader).log v11 update_from_paxos logging for channel 'audit' to file 'testdir/osd-crush/log' 2016-03-08 15:19:04.200107 7f82a6b55700 15 mon.a@0(leader).log v11 update_from_paxos logging for 1 channels 2016-03-08 15:19:04.200108 7f82a6b55700 15 mon.a@0(leader).log v11 update_from_paxos channel 'audit' logging 502 bytes 2016-03-08 15:19:04.200122 7f82a6b55700 10 mon.a@0(leader).log v11 check_subs 2016-03-08 15:19:04.200133 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:04.200140 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) refresh 2016-03-08 15:19:04.200142 7f82a6b55700 10 mon.a@0(leader).auth v3 update_from_paxos 2016-03-08 15:19:04.200143 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..5) post_refresh 2016-03-08 15:19:04.200144 7f82a6b55700 10 mon.a@0(leader).pg v5 post_paxos_update 2016-03-08 15:19:04.200145 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:04.200146 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..4) post_refresh 2016-03-08 15:19:04.200146 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..11) post_refresh 2016-03-08 15:19:04.200147 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:04.200148 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) post_refresh 2016-03-08 15:19:04.200149 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..21) commit_proposal 2016-03-08 15:19:04.200150 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..11) _active - not active 2016-03-08 15:19:04.200152 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..21) finish_round 2016-03-08 15:19:04.200153 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..21) finish_round waiting_for_acting 2016-03-08 15:19:04.200154 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..11) _active 2016-03-08 15:19:04.200155 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..11) remove_legacy_versions 2016-03-08 15:19:04.200158 7f82a6b55700 7 mon.a@0(leader).paxosservice(logm 1..11) _active creating new pending 2016-03-08 15:19:04.200163 7f82a6b55700 10 mon.a@0(leader).log v11 create_pending v 12 2016-03-08 15:19:04.200178 7f82a6b55700 7 mon.a@0(leader).log v11 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.200184 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd191dd40 0x559cd15546c0 log(last 21) v1 2016-03-08 15:19:04.200188 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 21) v1 -- ?+0 0x559cd15546c0 con 0x559cd1477080 2016-03-08 15:19:04.200192 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 21) v1 local 2016-03-08 15:19:04.200217 7f82a6b55700 7 mon.a@0(leader).log v11 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.200223 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd191e400 
0x559cd1556f40 log(last 22) v1 2016-03-08 15:19:04.200217 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 21) v1 ==== 0+0+0 (0 0 0) 0x559cd15546c0 con 0x559cd1477080 2016-03-08 15:19:04.200225 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 22) v1 -- ?+0 0x559cd1556f40 con 0x559cd1477080 2016-03-08 15:19:04.200227 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 22) v1 local 2016-03-08 15:19:04.200235 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..21) finish_round waiting_for_readable 2016-03-08 15:19:04.200237 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..21) finish_round waiting_for_writeable 2016-03-08 15:19:04.200237 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..21) finish_round done w/ waiters, state 1 2016-03-08 15:19:04.200261 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.200265 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.200269 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.200271 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.200272 7f82a5352700 20 allow all 2016-03-08 15:19:04.200283 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd15546c0 2016-03-08 15:19:04.200285 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 22) v1 ==== 0+0+0 (0 0 0) 0x559cd1556f40 con 0x559cd1477080 2016-03-08 15:19:04.200293 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.200295 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.200297 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.200297 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.200298 7f82a5352700 20 allow all 2016-03-08 15:19:04.200303 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1556f40 2016-03-08 15:19:04.303820 7f82a4350700 20 accepter.accepter poll got 1 2016-03-08 15:19:04.303831 7f82a4350700 10 accepter.pfd.revents=1 2016-03-08 15:19:04.303836 7f82a4350700 10 accepter.accepted incoming on sd 21 2016-03-08 15:19:04.303872 7f82a4350700 20 accepter.accepter calling poll 2016-03-08 15:19:04.303879 7f82aea26700 10 -- 127.0.0.1:7104/0 >> :/0 pipe(0x559cd1974000 sd=21 :0 s=0 pgs=0 cs=0 l=0 c=0x559cd1978000).accept 2016-03-08 15:19:04.303937 7f82aea26700 1 -- 127.0.0.1:7104/0 >> :/0 pipe(0x559cd1974000 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd1978000).accept sd=21 127.0.0.1:52228/0 2016-03-08 15:19:04.303982 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd1978000).accept peer addr is 127.0.0.1:0/2227251016 2016-03-08 15:19:04.303992 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd1978000).accept got peer connect_seq 0 global_seq 1 2016-03-08 15:19:04.303998 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1978000).accept of host_type 8, policy.lossy=1 policy.server=1 policy.standby=0 policy.resetcheck=0 2016-03-08 15:19:04.304002 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1978000).accept my proto 15, their proto 15 2016-03-08 15:19:04.304007 7f82aea26700 10 mon.a@0(leader) 
e1 ms_verify_authorizer 127.0.0.1:0/2227251016 client protocol 0 2016-03-08 15:19:04.304020 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1978000).accept: setting up session_security. 2016-03-08 15:19:04.304025 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1978000).accept new session 2016-03-08 15:19:04.304029 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).accept success, connect_seq = 1, sending READY 2016-03-08 15:19:04.304034 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).accept features 576460752303423487 2016-03-08 15:19:04.304044 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).register_pipe 2016-03-08 15:19:04.304062 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).discard_requeued_up_to 0 2016-03-08 15:19:04.304068 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).accept starting writer, state open 2016-03-08 15:19:04.304082 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).accept done 2016-03-08 15:19:04.304091 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader reading tag... 2016-03-08 15:19:04.304096 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.304103 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer sleeping 2016-03-08 15:19:04.304109 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got KEEPALIVE2 2016-03-08 15:19:04.304085 2016-03-08 15:19:04.304120 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader reading tag... 2016-03-08 15:19:04.304122 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.304127 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).write_keepalive2 15 2016-03-08 15:19:04.304085 2016-03-08 15:19:04.304136 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got MSG 2016-03-08 15:19:04.304141 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got envelope type=17 src client.? 
front=60 data=0 off 0 2016-03-08 15:19:04.304147 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader wants 60 bytes from policy throttler 0/104857600 2016-03-08 15:19:04.304153 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.304154 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader wants 60 from dispatch throttler 0/104857600 2016-03-08 15:19:04.304158 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer sleeping 2016-03-08 15:19:04.304159 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got front 60 2016-03-08 15:19:04.304163 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).aborted = 0 2016-03-08 15:19:04.304166 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got 60 + 0 + 0 byte message 2016-03-08 15:19:04.304178 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got message 1 0x559cd17cac80 auth(proto 0 30 bytes epoch 0) v1 2016-03-08 15:19:04.304193 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd17cac80 prio 127 2016-03-08 15:19:04.304200 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader reading tag... 2016-03-08 15:19:04.304203 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.? 127.0.0.1:0/2227251016 1 ==== auth(proto 0 30 bytes epoch 0) v1 ==== 60+0+0 (900162395 0 0) 0x559cd17cac80 con 0x559cd1978000 2016-03-08 15:19:04.304217 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.304227 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).write_ack 1 2016-03-08 15:19:04.304231 7f82a5352700 10 mon.a@0(leader) e1 _ms_dispatch new session 0x559cd1857600 MonSession(client.? 127.0.0.1:0/2227251016 is open) 2016-03-08 15:19:04.304232 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.304235 7f82a5352700 20 mon.a@0(leader) e1 caps 2016-03-08 15:19:04.304241 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer sleeping 2016-03-08 15:19:04.304240 7f82a5352700 10 mon.a@0(leader).paxosservice(auth 1..3) dispatch 0x559cd17cac80 auth(proto 0 30 bytes epoch 0) v1 from client.? 127.0.0.1:0/2227251016 con 0x559cd1978000 2016-03-08 15:19:04.304244 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..21) is_readable = 1 - now=2016-03-08 15:19:04.304244 lease_expire=0.000000 has v0 lc 21 2016-03-08 15:19:04.304248 7f82a5352700 10 mon.a@0(leader).auth v3 preprocess_query auth(proto 0 30 bytes epoch 0) v1 from client.? 
127.0.0.1:0/2227251016 2016-03-08 15:19:04.304251 7f82a5352700 10 mon.a@0(leader).auth v3 prep_auth() blob_size=30 2016-03-08 15:19:04.304255 7f82a5352700 10 mon.a@0(leader).auth v3 AuthMonitor::assign_global_id m=auth(proto 0 30 bytes epoch 0) v1 mon=0/1 last_allocated=4101 max_global_id=14096 2016-03-08 15:19:04.304258 7f82a5352700 10 mon.a@0(leader).auth v3 next_global_id should be 4102 2016-03-08 15:19:04.304263 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2227251016 -- mon_map magic: 0 v1 -- ?+0 0x559cd1556f40 con 0x559cd1978000 2016-03-08 15:19:04.304268 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_map magic: 0 v1 remote, 127.0.0.1:0/2227251016, have pipe. 2016-03-08 15:19:04.304275 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.304278 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer encoding 1 features 576460752303423487 0x559cd1556f40 mon_map magic: 0 v1 2016-03-08 15:19:04.304284 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer signed seq # 1): sig = 0 2016-03-08 15:19:04.304280 7f82a5352700 2 mon.a@0(leader) e1 send_reply 0x559cd191e9a0 0x559cd1562300 auth_reply(proto 1 0 (0) Success) v1 2016-03-08 15:19:04.304289 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer sending 1 0x559cd1556f40 2016-03-08 15:19:04.304291 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2227251016 -- auth_reply(proto 1 0 (0) Success) v1 -- ?+0 0x559cd1562300 con 0x559cd1978000 2016-03-08 15:19:04.304297 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message auth_reply(proto 1 0 (0) Success) v1 remote, 127.0.0.1:0/2227251016, have pipe. 
2016-03-08 15:19:04.304307 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.304315 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 60 to dispatch throttler 60/104857600 2016-03-08 15:19:04.304318 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd17cac80 2016-03-08 15:19:04.304317 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer encoding 2 features 576460752303423487 0x559cd1562300 auth_reply(proto 1 0 (0) Success) v1 2016-03-08 15:19:04.304326 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer signed seq # 2): sig = 0 2016-03-08 15:19:04.304331 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer sending 2 0x559cd1562300 2016-03-08 15:19:04.304349 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.304353 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer sleeping 2016-03-08 15:19:04.304415 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got ACK 2016-03-08 15:19:04.304422 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got ack seq 1 2016-03-08 15:19:04.304426 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader reading tag... 2016-03-08 15:19:04.304430 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got ACK 2016-03-08 15:19:04.304433 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got ack seq 2 2016-03-08 15:19:04.304437 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader reading tag... 2016-03-08 15:19:04.304440 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got MSG 2016-03-08 15:19:04.304444 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got envelope type=15 src client.? 
front=23 data=0 off 0 2016-03-08 15:19:04.304449 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader wants 23 bytes from policy throttler 0/104857600 2016-03-08 15:19:04.304453 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader wants 23 from dispatch throttler 0/104857600 2016-03-08 15:19:04.304458 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got front 23 2016-03-08 15:19:04.304462 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).aborted = 0 2016-03-08 15:19:04.304465 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got 23 + 0 + 0 byte message 2016-03-08 15:19:04.304475 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got message 2 0x559cd1858800 mon_subscribe({monmap=0+}) v2 2016-03-08 15:19:04.304482 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1858800 prio 127 2016-03-08 15:19:04.304487 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader reading tag... 2016-03-08 15:19:04.304490 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.304489 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.? 127.0.0.1:0/2227251016 2 ==== mon_subscribe({monmap=0+}) v2 ==== 23+0+0 (1620593354 0 0) 0x559cd1858800 con 0x559cd1978000 2016-03-08 15:19:04.304499 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).write_ack 2 2016-03-08 15:19:04.304505 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got MSG 2016-03-08 15:19:04.304508 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.304510 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got envelope type=15 src client.4102 front=23 data=0 off 0 2016-03-08 15:19:04.304512 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer sleeping 2016-03-08 15:19:04.304514 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader wants 23 bytes from policy throttler 23/104857600 2016-03-08 15:19:04.304522 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1857600 for client.? 
127.0.0.1:0/2227251016 2016-03-08 15:19:04.304525 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.304524 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader wants 23 from dispatch throttler 23/104857600 2016-03-08 15:19:04.304529 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.304528 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got front 23 2016-03-08 15:19:04.304531 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.304532 7f82a5352700 20 allow all 2016-03-08 15:19:04.304533 7f82a5352700 10 mon.a@0(leader) e1 handle_subscribe mon_subscribe({monmap=0+}) v2 2016-03-08 15:19:04.304532 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).aborted = 0 2016-03-08 15:19:04.304535 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got 23 + 0 + 0 byte message 2016-03-08 15:19:04.304538 7f82a5352700 10 mon.a@0(leader) e1 check_sub monmap next 0 have 1 2016-03-08 15:19:04.304541 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2227251016 -- mon_map magic: 0 v1 -- ?+0 0x559cd15546c0 con 0x559cd1978000 2016-03-08 15:19:04.304543 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got message 3 0x559cd1858a00 mon_subscribe({osdmap=0}) v2 2016-03-08 15:19:04.304547 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1858a00 prio 127 2016-03-08 15:19:04.304551 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader reading tag... 2016-03-08 15:19:04.304552 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_map magic: 0 v1 remote, 127.0.0.1:0/2227251016, have pipe. 
2016-03-08 15:19:04.304557 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.304562 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 23 to dispatch throttler 46/104857600 2016-03-08 15:19:04.304563 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).write_ack 3 2016-03-08 15:19:04.304565 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1858800 2016-03-08 15:19:04.304568 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer encoding 3 features 576460752303423487 0x559cd15546c0 mon_map magic: 0 v1 2016-03-08 15:19:04.304571 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4102 127.0.0.1:0/2227251016 3 ==== mon_subscribe({osdmap=0}) v2 ==== 23+0+0 (319581110 0 0) 0x559cd1858a00 con 0x559cd1978000 2016-03-08 15:19:04.304574 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer signed seq # 3): sig = 0 2016-03-08 15:19:04.304580 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer sending 3 0x559cd15546c0 2016-03-08 15:19:04.304595 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1857600 for client.? 127.0.0.1:0/2227251016 2016-03-08 15:19:04.304598 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.304598 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.304602 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.304604 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.304602 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer sleeping 2016-03-08 15:19:04.304604 7f82a5352700 20 allow all 2016-03-08 15:19:04.304617 7f82a5352700 10 mon.a@0(leader) e1 handle_subscribe mon_subscribe({osdmap=0}) v2 2016-03-08 15:19:04.304619 7f82a5352700 20 is_capable service=osd command= read on cap allow * 2016-03-08 15:19:04.304620 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.304620 7f82a5352700 20 allow all 2016-03-08 15:19:04.304621 7f82a5352700 10 mon.a@0(leader).osd e4 check_sub 0x559cd188af00 next 0 (onetime) 2016-03-08 15:19:04.304626 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2227251016 -- osd_map(4..4 src has 1..4) v3 -- ?+0 0x559cd17cac80 con 0x559cd1978000 2016-03-08 15:19:04.304630 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message osd_map(4..4 src has 1..4) v3 remote, 127.0.0.1:0/2227251016, have pipe. 
2016-03-08 15:19:04.304641 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 23 to dispatch throttler 23/104857600 2016-03-08 15:19:04.304636 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.304645 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1858a00 2016-03-08 15:19:04.304647 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer encoding 4 features 576460752303423487 0x559cd17cac80 osd_map(4..4 src has 1..4) v3 2016-03-08 15:19:04.304654 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer signed seq # 4): sig = 0 2016-03-08 15:19:04.304657 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer sending 4 0x559cd17cac80 2016-03-08 15:19:04.304677 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.304682 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer sleeping 2016-03-08 15:19:04.306438 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got ACK 2016-03-08 15:19:04.306447 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got ack seq 3 2016-03-08 15:19:04.306450 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader reading tag... 2016-03-08 15:19:04.306453 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got ACK 2016-03-08 15:19:04.306455 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got ack seq 4 2016-03-08 15:19:04.306458 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader reading tag... 
2016-03-08 15:19:04.306461 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got MSG 2016-03-08 15:19:04.306464 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got envelope type=50 src client.4102 front=80 data=0 off 0 2016-03-08 15:19:04.306469 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader wants 80 bytes from policy throttler 0/104857600 2016-03-08 15:19:04.306472 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader wants 80 from dispatch throttler 0/104857600 2016-03-08 15:19:04.306478 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got front 80 2016-03-08 15:19:04.306482 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).aborted = 0 2016-03-08 15:19:04.306485 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got 80 + 0 + 0 byte message 2016-03-08 15:19:04.306494 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got message 4 0x559cd1556f40 mon_command({"prefix": "get_command_descriptions"} v 0) v1 2016-03-08 15:19:04.306499 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1556f40 prio 127 2016-03-08 15:19:04.306504 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader reading tag... 2016-03-08 15:19:04.306506 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.306507 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4102 127.0.0.1:0/2227251016 4 ==== mon_command({"prefix": "get_command_descriptions"} v 0) v1 ==== 80+0+0 (3134233619 0 0) 0x559cd1556f40 con 0x559cd1978000 2016-03-08 15:19:04.306516 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).write_ack 4 2016-03-08 15:19:04.306523 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.306526 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer sleeping 2016-03-08 15:19:04.306537 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1857600 for client.? 
127.0.0.1:0/2227251016 2016-03-08 15:19:04.306540 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.307377 7f82a5352700 2 mon.a@0(leader) e1 send_reply 0x559cd191f3c0 0x559cd1555200 mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 2016-03-08 15:19:04.307379 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2227251016 -- mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 -- ?+37345 0x559cd1555200 con 0x559cd1978000 2016-03-08 15:19:04.307384 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 remote, 127.0.0.1:0/2227251016, have pipe. 2016-03-08 15:19:04.307391 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.307398 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 80 to dispatch throttler 80/104857600 2016-03-08 15:19:04.307397 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer encoding 5 features 576460752303423487 0x559cd1555200 mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 2016-03-08 15:19:04.307401 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1556f40 2016-03-08 15:19:04.307406 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer signed seq # 5): sig = 0 2016-03-08 15:19:04.307410 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer sending 5 0x559cd1555200 2016-03-08 15:19:04.307437 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.307442 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer sleeping 2016-03-08 15:19:04.309477 7f82a4350700 20 accepter.accepter poll got 1 2016-03-08 15:19:04.309483 7f82a4350700 10 accepter.pfd.revents=1 2016-03-08 15:19:04.309488 7f82a4350700 10 accepter.accepted incoming on sd 22 2016-03-08 15:19:04.309521 7f82a4350700 20 accepter.accepter calling poll 2016-03-08 15:19:04.309536 7f82a324d700 10 -- 127.0.0.1:7104/0 >> :/0 pipe(0x559cd1975400 sd=22 :0 s=0 pgs=0 cs=0 l=0 c=0x559cd1978180).accept 2016-03-08 15:19:04.309577 7f82a324d700 1 -- 127.0.0.1:7104/0 >> :/0 pipe(0x559cd1975400 sd=22 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd1978180).accept sd=22 127.0.0.1:52230/0 2016-03-08 15:19:04.309616 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd1978180).accept peer addr is 127.0.0.1:6800/4256 2016-03-08 15:19:04.309627 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd1978180).accept got peer connect_seq 0 global_seq 1 2016-03-08 15:19:04.309633 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1978180).accept of host_type 4, policy.lossy=1 policy.server=1 policy.standby=0 policy.resetcheck=0 2016-03-08 15:19:04.309636 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1978180).accept my proto 15, their proto 15 
2016-03-08 15:19:04.309639 7f82a324d700 10 mon.a@0(leader) e1 ms_verify_authorizer 127.0.0.1:6800/4256 osd protocol 0 2016-03-08 15:19:04.309643 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1978180).accept: setting up session_security. 2016-03-08 15:19:04.309645 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1978180).accept new session 2016-03-08 15:19:04.309647 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).accept success, connect_seq = 1, sending READY 2016-03-08 15:19:04.309649 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).accept features 576460752303423487 2016-03-08 15:19:04.309656 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).register_pipe 2016-03-08 15:19:04.309672 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).discard_requeued_up_to 0 2016-03-08 15:19:04.309677 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).accept starting writer, state open 2016-03-08 15:19:04.309701 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).accept done 2016-03-08 15:19:04.309710 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader reading tag... 2016-03-08 15:19:04.309714 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.309723 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sleeping 2016-03-08 15:19:04.309736 7f82a324d700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got KEEPALIVE2 2016-03-08 15:19:04.309721 2016-03-08 15:19:04.309745 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader reading tag... 
2016-03-08 15:19:04.309747 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.309752 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).write_keepalive2 15 2016-03-08 15:19:04.309721 2016-03-08 15:19:04.309769 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got MSG 2016-03-08 15:19:04.309776 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got envelope type=17 src osd.0 front=56 data=0 off 0 2016-03-08 15:19:04.309780 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader wants 56 bytes from policy throttler 0/419430400 2016-03-08 15:19:04.309785 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader wants 56 from dispatch throttler 0/104857600 2016-03-08 15:19:04.309785 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.309797 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sleeping 2016-03-08 15:19:04.309800 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got front 56 2016-03-08 15:19:04.309804 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).aborted = 0 2016-03-08 15:19:04.309807 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got 56 + 0 + 0 byte message 2016-03-08 15:19:04.309818 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got message 1 0x559cd19a6000 auth(proto 0 26 bytes epoch 0) v1 2016-03-08 15:19:04.309824 7f82a324d700 20 -- 127.0.0.1:7104/0 queue 0x559cd19a6000 prio 127 2016-03-08 15:19:04.309827 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader reading tag... 
2016-03-08 15:19:04.309829 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.309833 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).write_ack 1 2016-03-08 15:19:04.309832 7f82a5352700 1 -- 127.0.0.1:7104/0 <== osd.0 127.0.0.1:6800/4256 1 ==== auth(proto 0 26 bytes epoch 0) v1 ==== 56+0+0 (2561107048 0 0) 0x559cd19a6000 con 0x559cd1978180 2016-03-08 15:19:04.309838 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.309840 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sleeping 2016-03-08 15:19:04.309854 7f82a5352700 10 mon.a@0(leader) e1 _ms_dispatch new session 0x559cd1858a00 MonSession(osd.0 127.0.0.1:6800/4256 is open) 2016-03-08 15:19:04.309858 7f82a5352700 20 mon.a@0(leader) e1 caps 2016-03-08 15:19:04.309863 7f82a5352700 10 mon.a@0(leader).paxosservice(auth 1..3) dispatch 0x559cd19a6000 auth(proto 0 26 bytes epoch 0) v1 from osd.0 127.0.0.1:6800/4256 con 0x559cd1978180 2016-03-08 15:19:04.309867 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..21) is_readable = 1 - now=2016-03-08 15:19:04.309867 lease_expire=0.000000 has v0 lc 21 2016-03-08 15:19:04.309870 7f82a5352700 10 mon.a@0(leader).auth v3 preprocess_query auth(proto 0 26 bytes epoch 0) v1 from osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.309873 7f82a5352700 10 mon.a@0(leader).auth v3 prep_auth() blob_size=26 2016-03-08 15:19:04.309878 7f82a5352700 10 mon.a@0(leader).auth v3 AuthMonitor::assign_global_id m=auth(proto 0 26 bytes epoch 0) v1 mon=0/1 last_allocated=4102 max_global_id=14096 2016-03-08 15:19:04.309880 7f82a5352700 10 mon.a@0(leader).auth v3 next_global_id should be 4103 2016-03-08 15:19:04.309885 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:6800/4256 -- mon_map magic: 0 v1 -- ?+0 0x559cd1556f40 con 0x559cd1978180 2016-03-08 15:19:04.309888 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_map magic: 0 v1 remote, 127.0.0.1:6800/4256, have pipe. 
2016-03-08 15:19:04.309895 7f82a5352700 2 mon.a@0(leader) e1 send_reply 0x559cd191f600 0x559cd1563c00 auth_reply(proto 1 0 (0) Success) v1 2016-03-08 15:19:04.309897 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.309899 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:6800/4256 -- auth_reply(proto 1 0 (0) Success) v1 -- ?+0 0x559cd1563c00 con 0x559cd1978180 2016-03-08 15:19:04.309900 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer encoding 1 features 576460752303423487 0x559cd1556f40 mon_map magic: 0 v1 2016-03-08 15:19:04.309909 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer signed seq # 1): sig = 0 2016-03-08 15:19:04.309914 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sending 1 0x559cd1556f40 2016-03-08 15:19:04.309915 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message auth_reply(proto 1 0 (0) Success) v1 remote, 127.0.0.1:6800/4256, have pipe. 2016-03-08 15:19:04.309925 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.309925 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 56 to dispatch throttler 56/104857600 2016-03-08 15:19:04.309932 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd19a6000 2016-03-08 15:19:04.309930 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer encoding 2 features 576460752303423487 0x559cd1563c00 auth_reply(proto 1 0 (0) Success) v1 2016-03-08 15:19:04.309936 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer signed seq # 2): sig = 0 2016-03-08 15:19:04.309938 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sending 2 0x559cd1563c00 2016-03-08 15:19:04.309945 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.309948 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sleeping 2016-03-08 15:19:04.310038 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got ACK 2016-03-08 15:19:04.310044 7f82a324d700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got ack seq 1 2016-03-08 15:19:04.310047 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader reading tag... 
2016-03-08 15:19:04.310049 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got ACK 2016-03-08 15:19:04.310050 7f82a324d700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got ack seq 2 2016-03-08 15:19:04.310052 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader reading tag... 2016-03-08 15:19:04.310053 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got MSG 2016-03-08 15:19:04.310055 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got envelope type=15 src osd.0 front=23 data=0 off 0 2016-03-08 15:19:04.310057 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader wants 23 bytes from policy throttler 0/419430400 2016-03-08 15:19:04.310060 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader wants 23 from dispatch throttler 0/104857600 2016-03-08 15:19:04.310063 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got front 23 2016-03-08 15:19:04.310065 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).aborted = 0 2016-03-08 15:19:04.310066 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got 23 + 0 + 0 byte message 2016-03-08 15:19:04.310073 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got message 2 0x559cd1859400 mon_subscribe({monmap=0+}) v2 2016-03-08 15:19:04.310076 7f82a324d700 20 -- 127.0.0.1:7104/0 queue 0x559cd1859400 prio 127 2016-03-08 15:19:04.310079 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader reading tag... 
2016-03-08 15:19:04.310081 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.310084 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).write_ack 2 2016-03-08 15:19:04.310082 7f82a5352700 1 -- 127.0.0.1:7104/0 <== osd.0 127.0.0.1:6800/4256 2 ==== mon_subscribe({monmap=0+}) v2 ==== 23+0+0 (1620593354 0 0) 0x559cd1859400 con 0x559cd1978180 2016-03-08 15:19:04.310094 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got MSG 2016-03-08 15:19:04.310096 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.310098 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got envelope type=15 src osd.0 front=23 data=0 off 0 2016-03-08 15:19:04.310100 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sleeping 2016-03-08 15:19:04.310102 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader wants 23 bytes from policy throttler 23/419430400 2016-03-08 15:19:04.310106 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader wants 23 from dispatch throttler 23/104857600 2016-03-08 15:19:04.310107 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1858a00 for osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.310110 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.310110 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got front 23 2016-03-08 15:19:04.310113 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).aborted = 0 2016-03-08 15:19:04.310115 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.310117 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.310118 7f82a5352700 20 allow all 2016-03-08 15:19:04.310116 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got 23 + 0 + 0 byte message 2016-03-08 15:19:04.310119 7f82a5352700 10 mon.a@0(leader) e1 handle_subscribe mon_subscribe({monmap=0+}) v2 2016-03-08 15:19:04.310124 7f82a5352700 10 mon.a@0(leader) e1 check_sub monmap next 0 have 1 2016-03-08 15:19:04.310123 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got message 3 0x559cd1859600 mon_subscribe({osdmap=0}) v2 2016-03-08 15:19:04.310126 7f82a324d700 20 -- 127.0.0.1:7104/0 queue 0x559cd1859600 prio 127 2016-03-08 15:19:04.310128 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:6800/4256 -- mon_map magic: 0 v1 -- ?+0 0x559cd1556ac0 con 0x559cd1978180 2016-03-08 15:19:04.310131 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader reading tag... 
2016-03-08 15:19:04.310131 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_map magic: 0 v1 remote, 127.0.0.1:6800/4256, have pipe. 2016-03-08 15:19:04.310137 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.310140 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 23 to dispatch throttler 46/104857600 2016-03-08 15:19:04.310143 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1859400 2016-03-08 15:19:04.310142 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).write_ack 3 2016-03-08 15:19:04.310145 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got MSG 2016-03-08 15:19:04.310147 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer encoding 3 features 576460752303423487 0x559cd1556ac0 mon_map magic: 0 v1 2016-03-08 15:19:04.310149 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got envelope type=15 src osd.0 front=31 data=0 off 0 2016-03-08 15:19:04.310149 7f82a5352700 1 -- 127.0.0.1:7104/0 <== osd.0 127.0.0.1:6800/4256 3 ==== mon_subscribe({osdmap=0}) v2 ==== 23+0+0 (319581110 0 0) 0x559cd1859600 con 0x559cd1978180 2016-03-08 15:19:04.310152 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer signed seq # 3): sig = 0 2016-03-08 15:19:04.310154 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sending 3 0x559cd1556ac0 2016-03-08 15:19:04.310153 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader wants 31 bytes from policy throttler 23/419430400 2016-03-08 15:19:04.310157 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader wants 31 from dispatch throttler 23/104857600 2016-03-08 15:19:04.310171 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1858a00 for osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.310174 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.310176 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.310177 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sleeping 2016-03-08 15:19:04.310181 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.310182 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.310182 7f82a5352700 20 allow all 2016-03-08 15:19:04.310183 7f82a5352700 10 mon.a@0(leader) e1 handle_subscribe mon_subscribe({osdmap=0}) v2 2016-03-08 15:19:04.310188 7f82a5352700 20 is_capable service=osd command= read on cap allow * 2016-03-08 15:19:04.310189 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.310161 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 
c=0x559cd1978180).reader got front 31 2016-03-08 15:19:04.310190 7f82a5352700 20 allow all 2016-03-08 15:19:04.310191 7f82a5352700 10 mon.a@0(leader).osd e4 check_sub 0x559cd188b200 next 0 (onetime) 2016-03-08 15:19:04.310190 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).aborted = 0 2016-03-08 15:19:04.310194 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got 31 + 0 + 0 byte message 2016-03-08 15:19:04.310195 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:6800/4256 -- osd_map(4..4 src has 1..4) v3 -- ?+0 0x559cd19a6000 con 0x559cd1978180 2016-03-08 15:19:04.310200 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message osd_map(4..4 src has 1..4) v3 remote, 127.0.0.1:6800/4256, have pipe. 2016-03-08 15:19:04.310206 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.310211 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 23 to dispatch throttler 54/104857600 2016-03-08 15:19:04.310212 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1859600 2016-03-08 15:19:04.310212 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer encoding 4 features 576460752303423487 0x559cd19a6000 osd_map(4..4 src has 1..4) v3 2016-03-08 15:19:04.310220 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer signed seq # 4): sig = 0 2016-03-08 15:19:04.310225 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sending 4 0x559cd19a6000 2016-03-08 15:19:04.310227 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got message 4 0x559cd1859800 mon_subscribe({osd_pg_creates=0+}) v2 2016-03-08 15:19:04.310231 7f82a324d700 20 -- 127.0.0.1:7104/0 queue 0x559cd1859800 prio 127 2016-03-08 15:19:04.310234 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader reading tag... 
2016-03-08 15:19:04.310237 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got MSG 2016-03-08 15:19:04.310237 7f82a5352700 1 -- 127.0.0.1:7104/0 <== osd.0 127.0.0.1:6800/4256 4 ==== mon_subscribe({osd_pg_creates=0+}) v2 ==== 31+0+0 (2633385130 0 0) 0x559cd1859800 con 0x559cd1978180 2016-03-08 15:19:04.310251 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.310253 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).write_ack 4 2016-03-08 15:19:04.310256 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.310258 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sleeping 2016-03-08 15:19:04.310261 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1858a00 for osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.310264 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.310268 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.310269 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.310269 7f82a5352700 20 allow all 2016-03-08 15:19:04.310270 7f82a5352700 10 mon.a@0(leader) e1 handle_subscribe mon_subscribe({osd_pg_creates=0+}) v2 2016-03-08 15:19:04.310268 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got envelope type=19 src osd.0 front=18 data=0 off 0 2016-03-08 15:19:04.310274 7f82a5352700 20 is_capable service=osd command= write on cap allow * 2016-03-08 15:19:04.310275 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.310275 7f82a5352700 20 allow all 2016-03-08 15:19:04.310273 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader wants 18 bytes from policy throttler 31/419430400 2016-03-08 15:19:04.310277 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader wants 18 from dispatch throttler 31/104857600 2016-03-08 15:19:04.310282 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 31 to dispatch throttler 49/104857600 2016-03-08 15:19:04.310282 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got front 18 2016-03-08 15:19:04.310284 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1859800 2016-03-08 15:19:04.310285 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).aborted = 0 2016-03-08 15:19:04.310288 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got 18 + 0 + 0 byte message 2016-03-08 15:19:04.310295 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got message 5 0x559cd1858000 mon_get_version(what=osdmap handle=1) v1 2016-03-08 15:19:04.310299 7f82a324d700 20 -- 
127.0.0.1:7104/0 queue 0x559cd1858000 prio 127 2016-03-08 15:19:04.310303 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader reading tag... 2016-03-08 15:19:04.310304 7f82a5352700 1 -- 127.0.0.1:7104/0 <== osd.0 127.0.0.1:6800/4256 5 ==== mon_get_version(what=osdmap handle=1) v1 ==== 18+0+0 (4194021778 0 0) 0x559cd1858000 con 0x559cd1978180 2016-03-08 15:19:04.310309 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.310313 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).write_ack 5 2016-03-08 15:19:04.310318 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.310321 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sleeping 2016-03-08 15:19:04.310332 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1858a00 for osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.310334 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.310336 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.310337 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.310337 7f82a5352700 20 allow all 2016-03-08 15:19:04.310338 7f82a5352700 10 mon.a@0(leader) e1 handle_get_version mon_get_version(what=osdmap handle=1) v1 2016-03-08 15:19:04.310339 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..21) is_readable = 1 - now=2016-03-08 15:19:04.310339 lease_expire=0.000000 has v0 lc 21 2016-03-08 15:19:04.310344 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got ACK 2016-03-08 15:19:04.310345 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:6800/4256 -- mon_check_map_ack(handle=1 version=4) v2 -- ?+0 0x559cd1859800 con 0x559cd1978180 2016-03-08 15:19:04.310349 7f82a324d700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got ack seq 3 2016-03-08 15:19:04.310354 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_check_map_ack(handle=1 version=4) v2 remote, 127.0.0.1:6800/4256, have pipe. 2016-03-08 15:19:04.310363 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader reading tag... 
2016-03-08 15:19:04.310364 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 18 to dispatch throttler 18/104857600 2016-03-08 15:19:04.310362 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.310367 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1858000 2016-03-08 15:19:04.310367 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got ACK 2016-03-08 15:19:04.310368 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer encoding 5 features 576460752303423487 0x559cd1859800 mon_check_map_ack(handle=1 version=4) v2 2016-03-08 15:19:04.310371 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer signed seq # 5): sig = 0 2016-03-08 15:19:04.310375 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sending 5 0x559cd1859800 2016-03-08 15:19:04.310376 7f82a324d700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got ack seq 4 2016-03-08 15:19:04.310378 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader reading tag... 2016-03-08 15:19:04.310379 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got MSG 2016-03-08 15:19:04.310381 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got envelope type=15 src osd.0 front=23 data=0 off 0 2016-03-08 15:19:04.310383 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader wants 23 bytes from policy throttler 0/419430400 2016-03-08 15:19:04.310385 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.310387 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sleeping 2016-03-08 15:19:04.310387 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader wants 23 from dispatch throttler 0/104857600 2016-03-08 15:19:04.310390 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got front 23 2016-03-08 15:19:04.310393 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).aborted = 0 2016-03-08 15:19:04.310396 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got 23 + 0 + 0 byte message 2016-03-08 15:19:04.310404 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got message 6 0x559cd1857e00 mon_subscribe({osdmap=1}) v2 2016-03-08 15:19:04.310408 7f82a324d700 20 -- 
127.0.0.1:7104/0 queue 0x559cd1857e00 prio 127 2016-03-08 15:19:04.310412 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader reading tag... 2016-03-08 15:19:04.310417 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.310420 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).write_ack 6 2016-03-08 15:19:04.310425 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.310429 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sleeping 2016-03-08 15:19:04.310657 7f82a5352700 1 -- 127.0.0.1:7104/0 <== osd.0 127.0.0.1:6800/4256 6 ==== mon_subscribe({osdmap=1}) v2 ==== 23+0+0 (3878862334 0 0) 0x559cd1857e00 con 0x559cd1978180 2016-03-08 15:19:04.310680 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1858a00 for osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.310683 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.310689 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.310689 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.310691 7f82a5352700 20 allow all 2016-03-08 15:19:04.310691 7f82a5352700 10 mon.a@0(leader) e1 handle_subscribe mon_subscribe({osdmap=1}) v2 2016-03-08 15:19:04.310694 7f82a5352700 20 is_capable service=osd command= read on cap allow * 2016-03-08 15:19:04.310694 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.310694 7f82a5352700 20 allow all 2016-03-08 15:19:04.310695 7f82a5352700 10 mon.a@0(leader).osd e4 check_sub 0x559cd188b2c0 next 1 (onetime) 2016-03-08 15:19:04.310696 7f82a5352700 5 mon.a@0(leader).osd e4 send_incremental [1..4] to osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.310698 7f82a5352700 10 mon.a@0(leader).osd e4 build_incremental [1..4] 2016-03-08 15:19:04.310700 7f82a5352700 20 mon.a@0(leader).osd e4 build_incremental inc 4 199 bytes 2016-03-08 15:19:04.310701 7f82a5352700 20 mon.a@0(leader).osd e4 build_incremental inc 3 416 bytes 2016-03-08 15:19:04.310704 7f82a5352700 20 mon.a@0(leader).osd e4 build_incremental inc 2 182 bytes 2016-03-08 15:19:04.310705 7f82a5352700 20 mon.a@0(leader).osd e4 build_incremental inc 1 975 bytes 2016-03-08 15:19:04.310706 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:6800/4256 -- osd_map(1..4 src has 1..4) v3 -- ?+0 0x559cd1561b80 con 0x559cd1978180 2016-03-08 15:19:04.310709 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message osd_map(1..4 src has 1..4) v3 remote, 127.0.0.1:6800/4256, have pipe. 
2016-03-08 15:19:04.310714 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.310719 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 23 to dispatch throttler 23/104857600 2016-03-08 15:19:04.310721 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1857e00 2016-03-08 15:19:04.310720 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer encoding 6 features 576460752303423487 0x559cd1561b80 osd_map(1..4 src has 1..4) v3 2016-03-08 15:19:04.310731 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer signed seq # 6): sig = 0 2016-03-08 15:19:04.310736 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sending 6 0x559cd1561b80 2016-03-08 15:19:04.310758 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.310763 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sleeping 2016-03-08 15:19:04.355423 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got ACK 2016-03-08 15:19:04.355439 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got ack seq 5 2016-03-08 15:19:04.355452 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader reading tag... 
2016-03-08 15:19:04.355454 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got MSG 2016-03-08 15:19:04.355457 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got envelope type=50 src client.4102 front=148 data=0 off 0 2016-03-08 15:19:04.355468 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader wants 148 bytes from policy throttler 0/104857600 2016-03-08 15:19:04.355472 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader wants 148 from dispatch throttler 0/104857600 2016-03-08 15:19:04.355477 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got front 148 2016-03-08 15:19:04.355479 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).aborted = 0 2016-03-08 15:19:04.355481 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got 148 + 0 + 0 byte message 2016-03-08 15:19:04.355491 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader got message 5 0x559cd1556f40 mon_command({"prefix": "osd crush create-or-move", "args": ["root=default", "host=localhost"], "id": 0, "weight": 1.0} v 0) v1 2016-03-08 15:19:04.355507 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1556f40 prio 127 2016-03-08 15:19:04.355513 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).reader reading tag... 2016-03-08 15:19:04.355516 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.355523 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).write_ack 5 2016-03-08 15:19:04.355529 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.355531 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer sleeping 2016-03-08 15:19:04.355526 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4102 127.0.0.1:0/2227251016 5 ==== mon_command({"prefix": "osd crush create-or-move", "args": ["root=default", "host=localhost"], "id": 0, "weight": 1.0} v 0) v1 ==== 148+0+0 (4206888738 0 0) 0x559cd1556f40 con 0x559cd1978000 2016-03-08 15:19:04.355559 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1857600 for client.? 
127.0.0.1:0/2227251016 2016-03-08 15:19:04.355562 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.355617 7f82a5352700 0 mon.a@0(leader) e1 handle_command mon_command({"prefix": "osd crush create-or-move", "args": ["root=default", "host=localhost"], "id": 0, "weight": 1.0} v 0) v1 2016-03-08 15:19:04.355653 7f82a5352700 20 is_capable service=osd command=osd crush create-or-move read write on cap allow * 2016-03-08 15:19:04.355655 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.355656 7f82a5352700 20 allow all 2016-03-08 15:19:04.355657 7f82a5352700 10 mon.a@0(leader) e1 _allowed_command capable 2016-03-08 15:19:04.355661 7f82a5352700 0 log_channel(audit) log [INF] : from='client.? 127.0.0.1:0/2227251016' entity='client.admin' cmd=[{"prefix": "osd crush create-or-move", "args": ["root=default", "host=localhost"], "id": 0, "weight": 1.0}]: dispatch 2016-03-08 15:19:04.355672 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 23 at 2016-03-08 15:19:04.355662) v1 -- ?+0 0x559cd1562300 con 0x559cd1477080 2016-03-08 15:19:04.355677 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 23 at 2016-03-08 15:19:04.355662) v1 local 2016-03-08 15:19:04.355686 7f82a5352700 10 mon.a@0(leader).paxosservice(osdmap 1..4) dispatch 0x559cd1556f40 mon_command({"prefix": "osd crush create-or-move", "args": ["root=default", "host=localhost"], "id": 0, "weight": 1.0} v 0) v1 from client.4102 127.0.0.1:0/2227251016 con 0x559cd1978000 2016-03-08 15:19:04.355689 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..21) is_readable = 1 - now=2016-03-08 15:19:04.355689 lease_expire=0.000000 has v0 lc 21 2016-03-08 15:19:04.355695 7f82a5352700 10 mon.a@0(leader).osd e4 preprocess_query mon_command({"prefix": "osd crush create-or-move", "args": ["root=default", "host=localhost"], "id": 0, "weight": 1.0} v 0) v1 from client.4102 127.0.0.1:0/2227251016 2016-03-08 15:19:04.355733 7f82a5352700 7 mon.a@0(leader).osd e4 prepare_update mon_command({"prefix": "osd crush create-or-move", "args": ["root=default", "host=localhost"], "id": 0, "weight": 1.0} v 0) v1 from client.4102 127.0.0.1:0/2227251016 2016-03-08 15:19:04.355761 7f82a5352700 0 mon.a@0(leader).osd e4 create-or-move crush item name 'osd.0' initial_weight 1 at location {host=localhost,root=default} 2016-03-08 15:19:04.355806 7f82a5352700 10 mon.a@0(leader).osd e4 should_propose 2016-03-08 15:19:04.355817 7f82a5352700 10 mon.a@0(leader).paxosservice(osdmap 1..4) setting proposal_timer 0x559cd14945b0 with delay of 0.05 2016-03-08 15:19:04.355823 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 148 to dispatch throttler 148/104857600 2016-03-08 15:19:04.355825 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1556f40 2016-03-08 15:19:04.355827 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 23 at 2016-03-08 15:19:04.355662) v1 ==== 0+0+0 (0 0 0) 0x559cd1562300 con 0x559cd1477080 2016-03-08 15:19:04.355840 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.355843 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.355849 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..11) dispatch 0x559cd1562300 log(1 entries from seq 23 at 2016-03-08 15:19:04.355662) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:04.355853 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..21) is_readable = 1 - 
now=2016-03-08 15:19:04.355853 lease_expire=0.000000 has v0 lc 21 2016-03-08 15:19:04.355858 7f82a5352700 10 mon.a@0(leader).log v11 preprocess_query log(1 entries from seq 23 at 2016-03-08 15:19:04.355662) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.355862 7f82a5352700 10 mon.a@0(leader).log v11 preprocess_log log(1 entries from seq 23 at 2016-03-08 15:19:04.355662) v1 from mon.0 2016-03-08 15:19:04.355865 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:04.355866 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.355867 7f82a5352700 20 allow all 2016-03-08 15:19:04.355872 7f82a5352700 10 mon.a@0(leader).log v11 prepare_update log(1 entries from seq 23 at 2016-03-08 15:19:04.355662) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.355877 7f82a5352700 10 mon.a@0(leader).log v11 prepare_log log(1 entries from seq 23 at 2016-03-08 15:19:04.355662) v1 from mon.0 2016-03-08 15:19:04.355878 7f82a5352700 10 mon.a@0(leader).log v11 logging 2016-03-08 15:19:04.355662 mon.0 127.0.0.1:7104/0 23 : audit [INF] from='client.? 127.0.0.1:0/2227251016' entity='client.admin' cmd=[{"prefix": "osd crush create-or-move", "args": ["root=default", "host=localhost"], "id": 0, "weight": 1.0}]: dispatch 2016-03-08 15:19:04.355885 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..11) setting proposal_timer 0x559cd14945c0 with delay of 0.05 2016-03-08 15:19:04.355888 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1562300 2016-03-08 15:19:04.405902 7f82a5b53700 10 mon.a@0(leader).paxosservice(osdmap 1..4) propose_pending 2016-03-08 15:19:04.405920 7f82a5b53700 10 mon.a@0(leader).osd e4 encode_pending e 5 2016-03-08 15:19:04.406008 7f82a5b53700 20 mon.a@0(leader).osd e4 full_crc 3175402483 inc_crc 1249020557 2016-03-08 15:19:04.406025 7f82a5b53700 5 mon.a@0(leader).paxos(paxos active c 1..21) queue_pending_finisher 0x559cd1494510 2016-03-08 15:19:04.406032 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..21) trigger_propose active, proposing now 2016-03-08 15:19:04.406040 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..21) propose_pending 22 2208 bytes 2016-03-08 15:19:04.406043 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..21) begin for 22 2208 bytes 2016-03-08 15:19:04.412315 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..21) commit_start 22 2016-03-08 15:19:04.412356 7f82a5b53700 10 mon.a@0(leader).paxosservice(logm 1..11) propose_pending 2016-03-08 15:19:04.412362 7f82a5b53700 10 mon.a@0(leader).log v11 encode_full log v 11 2016-03-08 15:19:04.412397 7f82a5b53700 10 mon.a@0(leader).log v11 encode_pending v12 2016-03-08 15:19:04.412419 7f82a5b53700 5 mon.a@0(leader).paxos(paxos writing c 1..21) queue_pending_finisher 0x559cd14945b0 2016-03-08 15:19:04.412424 7f82a5b53700 10 mon.a@0(leader).paxos(paxos writing c 1..21) trigger_propose not active, will propose later 2016-03-08 15:19:04.418458 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..21) commit_finish 22 2016-03-08 15:19:04.418498 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:04.418514 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..5) refresh 2016-03-08 15:19:04.418525 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:04.418533 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..5) refresh 2016-03-08 15:19:04.418535 7f82a6b55700 15 mon.a@0(leader).osd e4 update_from_paxos paxos e 5, my e 4 2016-03-08 15:19:04.418549 7f82a6b55700 7 mon.a@0(leader).osd e4 
update_from_paxos applying incremental 5 2016-03-08 15:19:04.418595 7f82a6b55700 1 mon.a@0(leader).osd e5 e5: 1 osds: 0 up, 0 in 2016-03-08 15:19:04.425718 7f82a6b55700 5 mon.a@0(leader).paxos(paxos refresh c 1..22) is_readable = 0 - now=2016-03-08 15:19:04.425721 lease_expire=0.000000 has v0 lc 22 2016-03-08 15:19:04.425734 7f82a6b55700 10 mon.a@0(leader).pg v5 check_osd_map -- osdmap not readable, waiting 2016-03-08 15:19:04.425737 7f82a6b55700 10 mon.a@0(leader).osd e5 check_subs 2016-03-08 15:19:04.425739 7f82a6b55700 10 mon.a@0(leader).osd e5 share_map_with_random_osd no up osds, don't share with anyone 2016-03-08 15:19:04.425740 7f82a6b55700 10 mon.a@0(leader).osd e5 update_logger 2016-03-08 15:19:04.425770 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..11) refresh 2016-03-08 15:19:04.425772 7f82a6b55700 10 mon.a@0(leader).log v11 update_from_paxos 2016-03-08 15:19:04.425774 7f82a6b55700 10 mon.a@0(leader).log v11 update_from_paxos version 11 summary v 11 2016-03-08 15:19:04.425783 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:04.425790 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) refresh 2016-03-08 15:19:04.425795 7f82a6b55700 10 mon.a@0(leader).auth v3 update_from_paxos 2016-03-08 15:19:04.425797 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..5) post_refresh 2016-03-08 15:19:04.425798 7f82a6b55700 10 mon.a@0(leader).pg v5 post_paxos_update 2016-03-08 15:19:04.425799 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:04.425800 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..5) post_refresh 2016-03-08 15:19:04.425801 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..11) post_refresh 2016-03-08 15:19:04.425802 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:04.425802 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) post_refresh 2016-03-08 15:19:04.425804 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..22) commit_proposal 2016-03-08 15:19:04.425806 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..5) _active - not active 2016-03-08 15:19:04.425808 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..22) finish_round 2016-03-08 15:19:04.425809 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..22) finish_round waiting_for_acting 2016-03-08 15:19:04.425810 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..5) _active 2016-03-08 15:19:04.425811 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..5) remove_legacy_versions 2016-03-08 15:19:04.425815 7f82a6b55700 7 mon.a@0(leader).paxosservice(osdmap 1..5) _active creating new pending 2016-03-08 15:19:04.425818 7f82a6b55700 10 mon.a@0(leader).osd e5 create_pending e 6 2016-03-08 15:19:04.425851 7f82a6b55700 0 log_channel(audit) log [INF] : from='client.? 
127.0.0.1:0/2227251016' entity='client.admin' cmd='[{"prefix": "osd crush create-or-move", "args": ["root=default", "host=localhost"], "id": 0, "weight": 1.0}]': finished 2016-03-08 15:19:04.425864 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 24 at 2016-03-08 15:19:04.425854) v1 -- ?+0 0x559cd17cca80 con 0x559cd1477080 2016-03-08 15:19:04.425871 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 24 at 2016-03-08 15:19:04.425854) v1 local 2016-03-08 15:19:04.425881 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd19965a0 0x559cd1557600 mon_command_ack([{"prefix": "osd crush create-or-move", "args": ["root=default", "host=localhost"], "id": 0, "weight": 1.0}]=0 create-or-move updating item name 'osd.0' weight 1 at location {host=localhost,root=default} to crush map v5) v1 2016-03-08 15:19:04.425885 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2227251016 -- mon_command_ack([{"prefix": "osd crush create-or-move", "args": ["root=default", "host=localhost"], "id": 0, "weight": 1.0}]=0 create-or-move updating item name 'osd.0' weight 1 at location {host=localhost,root=default} to crush map v5) v1 -- ?+0 0x559cd1557600 con 0x559cd1978000 2016-03-08 15:19:04.425888 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message mon_command_ack([{"prefix": "osd crush create-or-move", "args": ["root=default", "host=localhost"], "id": 0, "weight": 1.0}]=0 create-or-move updating item name 'osd.0' weight 1 at location {host=localhost,root=default} to crush map v5) v1 remote, 127.0.0.1:0/2227251016, have pipe. 2016-03-08 15:19:04.425897 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.425905 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..22) is_readable = 1 - now=2016-03-08 15:19:04.425906 lease_expire=0.000000 has v0 lc 22 2016-03-08 15:19:04.425913 7f82a6b55700 10 mon.a@0(leader).pg v5 check_osd_map applying osdmap e5 to pg_map 2016-03-08 15:19:04.425902 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 24 at 2016-03-08 15:19:04.425854) v1 ==== 0+0+0 (0 0 0) 0x559cd17cca80 con 0x559cd1477080 2016-03-08 15:19:04.425914 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer encoding 6 features 576460752303423487 0x559cd1557600 mon_command_ack([{"prefix": "osd crush create-or-move", "args": ["root=default", "host=localhost"], "id": 0, "weight": 1.0}]=0 create-or-move updating item name 'osd.0' weight 1 at location {host=localhost,root=default} to crush map v5) v1 2016-03-08 15:19:04.425921 7f82a6b55700 10 mon.a@0(leader).pg v5 map_pg_creates to 4 pgs, osdmap epoch 5 2016-03-08 15:19:04.425924 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer signed seq # 6): sig = 0 2016-03-08 15:19:04.425927 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer sending 6 0x559cd1557600 2016-03-08 15:19:04.425973 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).writer: state = open policy.server=1 2016-03-08 15:19:04.425983 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 
c=0x559cd1978000).writer sleeping 2016-03-08 15:19:04.425991 7f82a6b55700 10 mon.a@0(leader).pg v5 register_new_pgs checking pg pools for osdmap epoch 5, last_pg_scan 4 2016-03-08 15:19:04.425994 7f82a6b55700 10 mon.a@0(leader).pg v5 no change in pool 1 replicated size 3 min_size 2 crush_ruleset 0 object_hash rjenkins pg_num 4 pgp_num 4 last_change 3 flags hashpspool stripe_width 0 2016-03-08 15:19:04.425998 7f82a6b55700 10 mon.a@0(leader).pg v5 register_new_pgs registered 0 new pgs, removed 0 uncreated pgs 2016-03-08 15:19:04.425999 7f82a6b55700 10 mon.a@0(leader).pg v5 check_down_pgs 2016-03-08 15:19:04.426001 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..5) propose_pending 2016-03-08 15:19:04.426003 7f82a6b55700 10 mon.a@0(leader).pg v5 encode_pending v 6 2016-03-08 15:19:04.426014 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..22) queue_pending_finisher 0x559cd1494460 2016-03-08 15:19:04.426017 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..22) trigger_propose active, proposing now 2016-03-08 15:19:04.426030 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..22) propose_pending 23 7278 bytes 2016-03-08 15:19:04.426031 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..22) begin for 23 7278 bytes 2016-03-08 15:19:04.438440 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..22) commit_start 23 2016-03-08 15:19:04.438486 7f82a6b55700 10 mon.a@0(leader).osd e5 update_logger 2016-03-08 15:19:04.438505 7f82a6b55700 0 log_channel(cluster) log [INF] : osdmap e5: 1 osds: 0 up, 0 in 2016-03-08 15:19:04.438523 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 25 at 2016-03-08 15:19:04.438506) v1 -- ?+0 0x559cd1562580 con 0x559cd1477080 2016-03-08 15:19:04.438530 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 25 at 2016-03-08 15:19:04.438506) v1 local 2016-03-08 15:19:04.438537 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..22) finish_round waiting_for_readable 2016-03-08 15:19:04.438539 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..22) finish_round waiting_for_writeable 2016-03-08 15:19:04.438540 7f82a6b55700 10 mon.a@0(leader).paxos(paxos writing c 1..22) finish_round done w/ waiters, state 4 2016-03-08 15:19:04.438596 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.438602 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.438620 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..11) dispatch 0x559cd17cca80 log(1 entries from seq 24 at 2016-03-08 15:19:04.425854) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:04.438625 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 1..22) is_readable = 1 - now=2016-03-08 15:19:04.438626 lease_expire=0.000000 has v0 lc 22 2016-03-08 15:19:04.438638 7f82a5352700 10 mon.a@0(leader).log v11 preprocess_query log(1 entries from seq 24 at 2016-03-08 15:19:04.425854) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.438644 7f82a5352700 10 mon.a@0(leader).log v11 preprocess_log log(1 entries from seq 24 at 2016-03-08 15:19:04.425854) v1 from mon.0 2016-03-08 15:19:04.438652 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:04.438653 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.438654 7f82a5352700 20 allow all 2016-03-08 15:19:04.438658 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..11) waiting for paxos -> writeable 2016-03-08 15:19:04.438675 7f82a5352700 20 
-- 127.0.0.1:7104/0 done calling dispatch on 0x559cd17cca80 2016-03-08 15:19:04.438684 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 25 at 2016-03-08 15:19:04.438506) v1 ==== 0+0+0 (0 0 0) 0x559cd1562580 con 0x559cd1477080 2016-03-08 15:19:04.438712 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.438714 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.438720 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..11) dispatch 0x559cd1562580 log(1 entries from seq 25 at 2016-03-08 15:19:04.438506) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:04.438726 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 1..22) is_readable = 1 - now=2016-03-08 15:19:04.438726 lease_expire=0.000000 has v0 lc 22 2016-03-08 15:19:04.438736 7f82a5352700 10 mon.a@0(leader).log v11 preprocess_query log(1 entries from seq 25 at 2016-03-08 15:19:04.438506) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.438743 7f82a5352700 10 mon.a@0(leader).log v11 preprocess_log log(1 entries from seq 25 at 2016-03-08 15:19:04.438506) v1 from mon.0 2016-03-08 15:19:04.438745 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:04.438747 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.438748 7f82a5352700 20 allow all 2016-03-08 15:19:04.438749 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..11) waiting for paxos -> writeable 2016-03-08 15:19:04.438756 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1562580 2016-03-08 15:19:04.445667 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..22) commit_finish 23 2016-03-08 15:19:04.445706 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:04.445724 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..6) refresh 2016-03-08 15:19:04.445726 7f82a6b55700 10 mon.a@0(leader).pg v5 update_from_paxos read_incremental 2016-03-08 15:19:04.445736 7f82a6b55700 10 mon.a@0(leader).pg v6 read_pgmap_meta 2016-03-08 15:19:04.445750 7f82a6b55700 10 mon.a@0(leader).pg v6 update_logger 2016-03-08 15:19:04.445760 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:04.445767 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..5) refresh 2016-03-08 15:19:04.445776 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..12) refresh 2016-03-08 15:19:04.445777 7f82a6b55700 10 mon.a@0(leader).log v12 update_from_paxos 2016-03-08 15:19:04.445782 7f82a6b55700 10 mon.a@0(leader).log v12 update_from_paxos version 12 summary v 11 2016-03-08 15:19:04.445786 7f82a6b55700 10 mon.a@0(leader).log v12 update_from_paxos latest full 11 2016-03-08 15:19:04.445791 7f82a6b55700 7 mon.a@0(leader).log v12 update_from_paxos applying incremental log 12 2016-03-08 15:19:04.355662 mon.0 127.0.0.1:7104/0 23 : audit [INF] from='client.? 
127.0.0.1:0/2227251016' entity='client.admin' cmd=[{"prefix": "osd crush create-or-move", "args": ["root=default", "host=localhost"], "id": 0, "weight": 1.0}]: dispatch 2016-03-08 15:19:04.445805 7f82a6b55700 20 mon.a@0(leader).log v12 update_from_paxos logging for channel 'audit' to file 'testdir/osd-crush/log' 2016-03-08 15:19:04.445813 7f82a6b55700 15 mon.a@0(leader).log v12 update_from_paxos logging for 1 channels 2016-03-08 15:19:04.445814 7f82a6b55700 15 mon.a@0(leader).log v12 update_from_paxos channel 'audit' logging 251 bytes 2016-03-08 15:19:04.445831 7f82a6b55700 10 mon.a@0(leader).log v12 check_subs 2016-03-08 15:19:04.445846 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:04.445854 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) refresh 2016-03-08 15:19:04.445856 7f82a6b55700 10 mon.a@0(leader).auth v3 update_from_paxos 2016-03-08 15:19:04.445858 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..6) post_refresh 2016-03-08 15:19:04.445859 7f82a6b55700 10 mon.a@0(leader).pg v6 post_paxos_update 2016-03-08 15:19:04.445860 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:04.445860 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..5) post_refresh 2016-03-08 15:19:04.445861 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..12) post_refresh 2016-03-08 15:19:04.445862 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:04.445862 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) post_refresh 2016-03-08 15:19:04.445864 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..23) commit_proposal 2016-03-08 15:19:04.445866 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..12) _active - not active 2016-03-08 15:19:04.445867 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..6) _active - not active 2016-03-08 15:19:04.445868 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..23) finish_round 2016-03-08 15:19:04.445869 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..23) finish_round waiting_for_acting 2016-03-08 15:19:04.445870 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..12) _active 2016-03-08 15:19:04.445871 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..12) remove_legacy_versions 2016-03-08 15:19:04.445883 7f82a6b55700 7 mon.a@0(leader).paxosservice(logm 1..12) _active creating new pending 2016-03-08 15:19:04.445889 7f82a6b55700 10 mon.a@0(leader).log v12 create_pending v 13 2016-03-08 15:19:04.445898 7f82a6b55700 7 mon.a@0(leader).log v12 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.445905 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd1996b40 0x559cd1556f40 log(last 23) v1 2016-03-08 15:19:04.445907 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 23) v1 -- ?+0 0x559cd1556f40 con 0x559cd1477080 2016-03-08 15:19:04.445910 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 23) v1 local 2016-03-08 15:19:04.445938 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..12) dispatch 0x559cd17cca80 log(1 entries from seq 24 at 2016-03-08 15:19:04.425854) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:04.445947 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 23) v1 ==== 0+0+0 (0 0 0) 0x559cd1556f40 con 0x559cd1477080 2016-03-08 15:19:04.445963 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..23) is_readable = 1 - now=2016-03-08 15:19:04.445964 lease_expire=0.000000 has v0 lc 23 2016-03-08 15:19:04.445975 7f82a6b55700 10 
mon.a@0(leader).log v12 preprocess_query log(1 entries from seq 24 at 2016-03-08 15:19:04.425854) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.445984 7f82a6b55700 10 mon.a@0(leader).log v12 preprocess_log log(1 entries from seq 24 at 2016-03-08 15:19:04.425854) v1 from mon.0 2016-03-08 15:19:04.445988 7f82a6b55700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:04.445990 7f82a6b55700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.445991 7f82a6b55700 20 allow all 2016-03-08 15:19:04.445997 7f82a6b55700 10 mon.a@0(leader).log v12 prepare_update log(1 entries from seq 24 at 2016-03-08 15:19:04.425854) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.446003 7f82a6b55700 10 mon.a@0(leader).log v12 prepare_log log(1 entries from seq 24 at 2016-03-08 15:19:04.425854) v1 from mon.0 2016-03-08 15:19:04.446005 7f82a6b55700 10 mon.a@0(leader).log v12 logging 2016-03-08 15:19:04.425854 mon.0 127.0.0.1:7104/0 24 : audit [INF] from='client.? 127.0.0.1:0/2227251016' entity='client.admin' cmd='[{"prefix": "osd crush create-or-move", "args": ["root=default", "host=localhost"], "id": 0, "weight": 1.0}]': finished 2016-03-08 15:19:04.446018 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..12) setting proposal_timer 0x559cd1494460 with delay of 0.0996585 2016-03-08 15:19:04.446036 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..12) dispatch 0x559cd1562580 log(1 entries from seq 25 at 2016-03-08 15:19:04.438506) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:04.446040 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..23) is_readable = 1 - now=2016-03-08 15:19:04.446041 lease_expire=0.000000 has v0 lc 23 2016-03-08 15:19:04.446046 7f82a6b55700 10 mon.a@0(leader).log v12 preprocess_query log(1 entries from seq 25 at 2016-03-08 15:19:04.438506) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.446055 7f82a6b55700 10 mon.a@0(leader).log v12 preprocess_log log(1 entries from seq 25 at 2016-03-08 15:19:04.438506) v1 from mon.0 2016-03-08 15:19:04.446061 7f82a6b55700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:04.446062 7f82a6b55700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.446063 7f82a6b55700 20 allow all 2016-03-08 15:19:04.446077 7f82a6b55700 10 mon.a@0(leader).log v12 prepare_update log(1 entries from seq 25 at 2016-03-08 15:19:04.438506) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.446085 7f82a6b55700 10 mon.a@0(leader).log v12 prepare_log log(1 entries from seq 25 at 2016-03-08 15:19:04.438506) v1 from mon.0 2016-03-08 15:19:04.446087 7f82a6b55700 10 mon.a@0(leader).log v12 logging 2016-03-08 15:19:04.438506 mon.0 127.0.0.1:7104/0 25 : cluster [INF] osdmap e5: 1 osds: 0 up, 0 in 2016-03-08 15:19:04.446093 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..12) proposal_timer already set 2016-03-08 15:19:04.446095 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..6) _active 2016-03-08 15:19:04.446096 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..6) remove_legacy_versions 2016-03-08 15:19:04.446103 7f82a6b55700 7 mon.a@0(leader).paxosservice(pgmap 1..6) _active creating new pending 2016-03-08 15:19:04.446105 7f82a6b55700 10 mon.a@0(leader).pg v6 create_pending v 7 2016-03-08 15:19:04.446107 7f82a6b55700 10 mon.a@0(leader).pg v6 check_osd_map already seen 5 >= 5 2016-03-08 15:19:04.446108 7f82a6b55700 10 mon.a@0(leader).pg v6 update_logger 2016-03-08 15:19:04.446118 7f82a6b55700 0 log_channel(cluster) log [INF] : pgmap v6: 4 pgs: 4 creating; 0 bytes data, 0 kB used, 0 kB / 
0 kB avail 2016-03-08 15:19:04.446128 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 26 at 2016-03-08 15:19:04.446119) v1 -- ?+0 0x559cd1562300 con 0x559cd1477080 2016-03-08 15:19:04.446132 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 26 at 2016-03-08 15:19:04.446119) v1 local 2016-03-08 15:19:04.446138 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..23) finish_round waiting_for_readable 2016-03-08 15:19:04.446141 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..23) finish_round waiting_for_writeable 2016-03-08 15:19:04.446142 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..23) finish_round done w/ waiters, state 1 2016-03-08 15:19:04.446172 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.446176 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.446180 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.446182 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.446183 7f82a5352700 20 allow all 2016-03-08 15:19:04.446195 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1556f40 2016-03-08 15:19:04.446200 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 26 at 2016-03-08 15:19:04.446119) v1 ==== 0+0+0 (0 0 0) 0x559cd1562300 con 0x559cd1477080 2016-03-08 15:19:04.446218 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.446221 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.446227 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..12) dispatch 0x559cd1562300 log(1 entries from seq 26 at 2016-03-08 15:19:04.446119) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:04.446232 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..23) is_readable = 1 - now=2016-03-08 15:19:04.446232 lease_expire=0.000000 has v0 lc 23 2016-03-08 15:19:04.446239 7f82a5352700 10 mon.a@0(leader).log v12 preprocess_query log(1 entries from seq 26 at 2016-03-08 15:19:04.446119) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.446247 7f82a5352700 10 mon.a@0(leader).log v12 preprocess_log log(1 entries from seq 26 at 2016-03-08 15:19:04.446119) v1 from mon.0 2016-03-08 15:19:04.446250 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:04.446251 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.446251 7f82a5352700 20 allow all 2016-03-08 15:19:04.446256 7f82a5352700 10 mon.a@0(leader).log v12 prepare_update log(1 entries from seq 26 at 2016-03-08 15:19:04.446119) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.446268 7f82a5352700 10 mon.a@0(leader).log v12 prepare_log log(1 entries from seq 26 at 2016-03-08 15:19:04.446119) v1 from mon.0 2016-03-08 15:19:04.446270 7f82a5352700 10 mon.a@0(leader).log v12 logging 2016-03-08 15:19:04.446119 mon.0 127.0.0.1:7104/0 26 : cluster [INF] pgmap v6: 4 pgs: 4 creating; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:04.446279 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..12) proposal_timer already set 2016-03-08 15:19:04.446282 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1562300 2016-03-08 15:19:04.449218 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got ACK 2016-03-08 15:19:04.449233 
7f82a324d700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got ack seq 5 2016-03-08 15:19:04.449238 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader reading tag... 2016-03-08 15:19:04.449240 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got ACK 2016-03-08 15:19:04.449242 7f82a324d700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got ack seq 6 2016-03-08 15:19:04.449244 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader reading tag... 2016-03-08 15:19:04.449246 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got MSG 2016-03-08 15:19:04.449257 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got envelope type=71 src osd.0 front=1763 data=0 off 0 2016-03-08 15:19:04.449261 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader wants 1763 bytes from policy throttler 0/419430400 2016-03-08 15:19:04.449265 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader wants 1763 from dispatch throttler 0/104857600 2016-03-08 15:19:04.449270 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got front 1763 2016-03-08 15:19:04.449272 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).aborted = 0 2016-03-08 15:19:04.449274 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got 1763 + 0 + 0 byte message 2016-03-08 15:19:04.449306 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got message 7 0x559cd1567200 osd_boot(osd.0 booted 0 features 576460752303423487 v0) v6 2016-03-08 15:19:04.449316 7f82a324d700 20 -- 127.0.0.1:7104/0 queue 0x559cd1567200 prio 127 2016-03-08 15:19:04.449321 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader reading tag... 
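The osd_boot message queued above is osd.0 announcing itself after receiving its maps; prepare_boot records its uuid and addresses, and the pending osdmap epoch 6 (proposed further down, where the log shows osd.0 UP and osd.0 IN) will mark it up and in at the crush location requested earlier. Once that proposal commits, the result is visible from any client; a sketch, again assuming a CLI pointed at this test monitor:

    $ ceph osd dump | grep '^osd\.0 '   # up/in flags and addresses after the boot epoch commits
    $ ceph osd tree                     # where osd.0 landed in the crush hierarchy (root=default, host=localhost)
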
2016-03-08 15:19:04.449347 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.449350 7f82a5352700 1 -- 127.0.0.1:7104/0 <== osd.0 127.0.0.1:6800/4256 7 ==== osd_boot(osd.0 booted 0 features 576460752303423487 v0) v6 ==== 1763+0+0 (1906605510 0 0) 0x559cd1567200 con 0x559cd1978180 2016-03-08 15:19:04.449363 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).write_ack 7 2016-03-08 15:19:04.449373 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.449378 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sleeping 2016-03-08 15:19:04.449401 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1858a00 for osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.449407 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.449418 7f82a5352700 10 mon.a@0(leader).paxosservice(osdmap 1..5) dispatch 0x559cd1567200 osd_boot(osd.0 booted 0 features 576460752303423487 v0) v6 from osd.0 127.0.0.1:6800/4256 con 0x559cd1978180 2016-03-08 15:19:04.449422 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..23) is_readable = 1 - now=2016-03-08 15:19:04.449422 lease_expire=0.000000 has v0 lc 23 2016-03-08 15:19:04.449430 7f82a5352700 10 mon.a@0(leader).osd e5 preprocess_query osd_boot(osd.0 booted 0 features 576460752303423487 v0) v6 from osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.449435 7f82a5352700 20 is_capable service=osd command= exec on cap allow * 2016-03-08 15:19:04.449439 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.449440 7f82a5352700 20 allow all 2016-03-08 15:19:04.449445 7f82a5352700 10 mon.a@0(leader).osd e5 preprocess_boot from osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.449455 7f82a5352700 7 mon.a@0(leader).osd e5 prepare_update osd_boot(osd.0 booted 0 features 576460752303423487 v0) v6 from osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.449459 7f82a5352700 7 mon.a@0(leader).osd e5 prepare_boot from osd.0 127.0.0.1:6800/4256 sb sb(c4878902-7748-4a77-afab-07655d3b0201 osd.0 ad2b7a59-c78c-449e-86c6-e537c3b12550 e0 [0,0] lci=[0,0]) cluster_addr 127.0.0.1:6801/4256 hb_back_addr 127.0.0.1:6802/4256 hb_front_addr 127.0.0.1:6803/4256 2016-03-08 15:19:04.449470 7f82a5352700 10 mon.a@0(leader).osd e5 setting osd.0 uuid to ad2b7a59-c78c-449e-86c6-e537c3b12550 2016-03-08 15:19:04.449478 7f82a5352700 10 mon.a@0(leader).osd e5 old osd_info: up_from 0 up_thru 0 down_at 0 last_clean_interval [0,0) 2016-03-08 15:19:04.449481 7f82a5352700 10 mon.a@0(leader).osd e5 not laggy, new xi down_stamp 0.000000 laggy_probability 0 laggy_interval 0 old_weight 0 2016-03-08 15:19:04.449490 7f82a5352700 10 mon.a@0(leader).osd e5 should_propose 2016-03-08 15:19:04.449500 7f82a5352700 10 mon.a@0(leader).paxosservice(osdmap 1..5) setting proposal_timer 0x559cd14945d0 with delay of 0.0961835 2016-03-08 15:19:04.449506 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 1763 to dispatch throttler 1763/104857600 2016-03-08 15:19:04.449508 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1567200 2016-03-08 15:19:04.470178 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 
l=1 c=0x559cd1978000).reader couldn't read tag, (0) Success 2016-03-08 15:19:04.470209 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).fault (0) Success 2016-03-08 15:19:04.470251 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).fault on lossy channel, failing 2016-03-08 15:19:04.470258 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978000).stop 2016-03-08 15:19:04.470267 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978000).unregister_pipe 2016-03-08 15:19:04.470273 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978000).discard_queue 2016-03-08 15:19:04.470285 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978000).reader done 2016-03-08 15:19:04.470288 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978000).writer finishing 2016-03-08 15:19:04.470300 7f82a334e700 10 -- 127.0.0.1:7104/0 queue_reap 0x559cd1974000 2016-03-08 15:19:04.470300 7f82a5352700 10 mon.a@0(leader) e1 ms_handle_reset 0x559cd1978000 127.0.0.1:0/2227251016 2016-03-08 15:19:04.470306 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978000).writer done 2016-03-08 15:19:04.470308 7f82a5352700 10 mon.a@0(leader) e1 reset/close on session client.? 127.0.0.1:0/2227251016 2016-03-08 15:19:04.470311 7f82a5352700 10 mon.a@0(leader) e1 remove_session 0x559cd1857600 client.? 
127.0.0.1:0/2227251016 2016-03-08 15:19:04.470309 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper 2016-03-08 15:19:04.470314 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper reaping pipe 0x559cd1974000 127.0.0.1:0/2227251016 2016-03-08 15:19:04.470317 7f82a6354700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978000).discard_queue 2016-03-08 15:19:04.470322 7f82a6354700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978000).unregister_pipe - not registered 2016-03-08 15:19:04.470330 7f82a6354700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2227251016 pipe(0x559cd1974000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978000).join 2016-03-08 15:19:04.470342 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper reaped pipe 0x559cd1974000 127.0.0.1:0/2227251016 2016-03-08 15:19:04.470351 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper deleted pipe 0x559cd1974000 2016-03-08 15:19:04.470352 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper done 2016-03-08 15:19:04.545763 7f82a5b53700 10 mon.a@0(leader).paxosservice(logm 1..12) propose_pending 2016-03-08 15:19:04.545777 7f82a5b53700 10 mon.a@0(leader).log v12 encode_full log v 12 2016-03-08 15:19:04.545803 7f82a5b53700 10 mon.a@0(leader).log v12 encode_pending v13 2016-03-08 15:19:04.545808 7f82a5b53700 5 mon.a@0(leader).paxos(paxos active c 1..23) queue_pending_finisher 0x559cd14945c0 2016-03-08 15:19:04.545810 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..23) trigger_propose active, proposing now 2016-03-08 15:19:04.545814 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..23) propose_pending 24 7665 bytes 2016-03-08 15:19:04.545816 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..23) begin for 24 7665 bytes 2016-03-08 15:19:04.553600 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..23) commit_start 24 2016-03-08 15:19:04.553643 7f82a5b53700 10 mon.a@0(leader).paxosservice(osdmap 1..5) propose_pending 2016-03-08 15:19:04.553646 7f82a5b53700 10 mon.a@0(leader).osd e5 encode_pending e 6 2016-03-08 15:19:04.553649 7f82a5b53700 2 mon.a@0(leader).osd e5 osd.0 UP 127.0.0.1:6800/4256 2016-03-08 15:19:04.553657 7f82a5b53700 2 mon.a@0(leader).osd e5 osd.0 IN 2016-03-08 15:19:04.553699 7f82a5b53700 20 mon.a@0(leader).osd e5 full_crc 2904793011 inc_crc 3973464293 2016-03-08 15:19:04.553714 7f82a5b53700 5 mon.a@0(leader).paxos(paxos writing c 1..23) queue_pending_finisher 0x559cd1494390 2016-03-08 15:19:04.553718 7f82a5b53700 10 mon.a@0(leader).paxos(paxos writing c 1..23) trigger_propose not active, will propose later 2016-03-08 15:19:04.564665 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..23) commit_finish 24 2016-03-08 15:19:04.564724 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:04.564746 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..6) refresh 2016-03-08 15:19:04.564758 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:04.564770 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..5) refresh 2016-03-08 15:19:04.564782 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..13) refresh 2016-03-08 15:19:04.564785 7f82a6b55700 10 mon.a@0(leader).log v13 update_from_paxos 2016-03-08 15:19:04.564787 7f82a6b55700 10 mon.a@0(leader).log v13 update_from_paxos version 13 summary v 12 2016-03-08 15:19:04.564793 7f82a6b55700 10 mon.a@0(leader).log v13 update_from_paxos latest full 12 2016-03-08 15:19:04.564804 7f82a6b55700 7 mon.a@0(leader).log v13 update_from_paxos applying 
incremental log 13 2016-03-08 15:19:04.425854 mon.0 127.0.0.1:7104/0 24 : audit [INF] from='client.? 127.0.0.1:0/2227251016' entity='client.admin' cmd='[{"prefix": "osd crush create-or-move", "args": ["root=default", "host=localhost"], "id": 0, "weight": 1.0}]': finished 2016-03-08 15:19:04.564816 7f82a6b55700 20 mon.a@0(leader).log v13 update_from_paxos logging for channel 'audit' to file 'testdir/osd-crush/log' 2016-03-08 15:19:04.564832 7f82a6b55700 7 mon.a@0(leader).log v13 update_from_paxos applying incremental log 13 2016-03-08 15:19:04.438506 mon.0 127.0.0.1:7104/0 25 : cluster [INF] osdmap e5: 1 osds: 0 up, 0 in 2016-03-08 15:19:04.564838 7f82a6b55700 20 mon.a@0(leader).log v13 update_from_paxos logging for channel 'cluster' to file 'testdir/osd-crush/log' 2016-03-08 15:19:04.564845 7f82a6b55700 7 mon.a@0(leader).log v13 update_from_paxos applying incremental log 13 2016-03-08 15:19:04.446119 mon.0 127.0.0.1:7104/0 26 : cluster [INF] pgmap v6: 4 pgs: 4 creating; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:04.564852 7f82a6b55700 20 mon.a@0(leader).log v13 update_from_paxos logging for channel 'cluster' to file 'testdir/osd-crush/log' 2016-03-08 15:19:04.564859 7f82a6b55700 15 mon.a@0(leader).log v13 update_from_paxos logging for 2 channels 2016-03-08 15:19:04.564860 7f82a6b55700 15 mon.a@0(leader).log v13 update_from_paxos channel 'audit' logging 253 bytes 2016-03-08 15:19:04.564875 7f82a6b55700 15 mon.a@0(leader).log v13 update_from_paxos channel 'cluster' logging 240 bytes 2016-03-08 15:19:04.564889 7f82a6b55700 10 mon.a@0(leader).log v13 check_subs 2016-03-08 15:19:04.564905 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:04.564916 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) refresh 2016-03-08 15:19:04.564919 7f82a6b55700 10 mon.a@0(leader).auth v3 update_from_paxos 2016-03-08 15:19:04.564921 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..6) post_refresh 2016-03-08 15:19:04.564923 7f82a6b55700 10 mon.a@0(leader).pg v6 post_paxos_update 2016-03-08 15:19:04.564924 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:04.564925 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..5) post_refresh 2016-03-08 15:19:04.564926 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..13) post_refresh 2016-03-08 15:19:04.564930 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:04.564931 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) post_refresh 2016-03-08 15:19:04.564933 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..24) commit_proposal 2016-03-08 15:19:04.564936 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..13) _active - not active 2016-03-08 15:19:04.564938 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..24) finish_round 2016-03-08 15:19:04.564940 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..24) finish_round waiting_for_acting 2016-03-08 15:19:04.564942 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..13) _active 2016-03-08 15:19:04.564943 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..13) remove_legacy_versions 2016-03-08 15:19:04.564962 7f82a6b55700 7 mon.a@0(leader).paxosservice(logm 1..13) _active creating new pending 2016-03-08 15:19:04.564971 7f82a6b55700 10 mon.a@0(leader).log v13 create_pending v 14 2016-03-08 15:19:04.564981 7f82a6b55700 7 mon.a@0(leader).log v13 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.564993 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 
0x559cd1997320 0x559cd15573c0 log(last 24) v1 2016-03-08 15:19:04.564997 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 24) v1 -- ?+0 0x559cd15573c0 con 0x559cd1477080 2016-03-08 15:19:04.565002 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 24) v1 local 2016-03-08 15:19:04.565029 7f82a6b55700 7 mon.a@0(leader).log v13 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.565053 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd19977a0 0x559cd1554fc0 log(last 25) v1 2016-03-08 15:19:04.565056 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 25) v1 -- ?+0 0x559cd1554fc0 con 0x559cd1477080 2016-03-08 15:19:04.565060 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 25) v1 local 2016-03-08 15:19:04.565052 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 24) v1 ==== 0+0+0 (0 0 0) 0x559cd15573c0 con 0x559cd1477080 2016-03-08 15:19:04.565082 7f82a6b55700 7 mon.a@0(leader).log v13 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.565093 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd1999600 0x559cd15546c0 log(last 26) v1 2016-03-08 15:19:04.565096 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 26) v1 -- ?+0 0x559cd15546c0 con 0x559cd1477080 2016-03-08 15:19:04.565102 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 26) v1 local 2016-03-08 15:19:04.565121 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..24) finish_round waiting_for_readable 2016-03-08 15:19:04.565123 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..24) finish_round waiting_for_writeable 2016-03-08 15:19:04.565124 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..24) finish_round done w/ waiters, state 1 2016-03-08 15:19:04.565132 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..24) propose_pending 25 3361 bytes 2016-03-08 15:19:04.565135 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..24) begin for 25 3361 bytes 2016-03-08 15:19:04.568867 7f82a4350700 20 accepter.accepter poll got 1 2016-03-08 15:19:04.568873 7f82a4350700 10 accepter.pfd.revents=1 2016-03-08 15:19:04.568880 7f82a4350700 10 accepter.accepted incoming on sd 21 2016-03-08 15:19:04.568900 7f82a4350700 20 accepter.accepter calling poll 2016-03-08 15:19:04.568921 7f82aea26700 10 -- 127.0.0.1:7104/0 >> :/0 pipe(0x559cd1976800 sd=21 :0 s=0 pgs=0 cs=0 l=0 c=0x559cd1978300).accept 2016-03-08 15:19:04.568972 7f82aea26700 1 -- 127.0.0.1:7104/0 >> :/0 pipe(0x559cd1976800 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd1978300).accept sd=21 127.0.0.1:52232/0 2016-03-08 15:19:04.569034 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd1978300).accept peer addr is 127.0.0.1:0/2303057837 2016-03-08 15:19:04.569043 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd1978300).accept got peer connect_seq 0 global_seq 1 2016-03-08 15:19:04.569046 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1978300).accept of host_type 8, policy.lossy=1 policy.server=1 policy.standby=0 policy.resetcheck=0 2016-03-08 15:19:04.569050 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1978300).accept my proto 15, their proto 15 2016-03-08 15:19:04.569059 7f82aea26700 10 mon.a@0(leader) e1 ms_verify_authorizer 127.0.0.1:0/2303057837 client protocol 
0 2016-03-08 15:19:04.569063 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1978300).accept: setting up session_security. 2016-03-08 15:19:04.569065 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1978300).accept new session 2016-03-08 15:19:04.569067 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).accept success, connect_seq = 1, sending READY 2016-03-08 15:19:04.569069 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).accept features 576460752303423487 2016-03-08 15:19:04.569074 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).register_pipe 2016-03-08 15:19:04.569086 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).discard_requeued_up_to 0 2016-03-08 15:19:04.569090 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).accept starting writer, state open 2016-03-08 15:19:04.569100 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).accept done 2016-03-08 15:19:04.569105 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader reading tag... 2016-03-08 15:19:04.569107 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.569113 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer sleeping 2016-03-08 15:19:04.569116 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got KEEPALIVE2 2016-03-08 15:19:04.569106 2016-03-08 15:19:04.569126 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader reading tag... 2016-03-08 15:19:04.569129 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.569133 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).write_keepalive2 15 2016-03-08 15:19:04.569106 2016-03-08 15:19:04.569140 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got MSG 2016-03-08 15:19:04.569144 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got envelope type=17 src client.? 
front=60 data=0 off 0 2016-03-08 15:19:04.569150 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader wants 60 bytes from policy throttler 0/104857600 2016-03-08 15:19:04.569155 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.569156 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader wants 60 from dispatch throttler 0/104857600 2016-03-08 15:19:04.569158 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer sleeping 2016-03-08 15:19:04.569162 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got front 60 2016-03-08 15:19:04.569165 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).aborted = 0 2016-03-08 15:19:04.569168 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got 60 + 0 + 0 byte message 2016-03-08 15:19:04.569176 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got message 1 0x559cd17cb680 auth(proto 0 30 bytes epoch 0) v1 2016-03-08 15:19:04.569182 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd17cb680 prio 127 2016-03-08 15:19:04.569185 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader reading tag... 
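This accept sequence is a fresh one-shot client connecting on a lossy pipe (policy.lossy=1, as with the previous client that was reaped above). The auth(proto 0 30 bytes) request just queued is answered a few entries below with a mon_map and auth_reply(proto 1 0 (0) Success), i.e. the cephx-less "none" scheme, and the AuthMonitor hands out the next global_id (4104 in this run). Any one-shot CLI call produces the same pattern, which makes it easy to correlate commands with log activity; a sketch with an assumed log path:

    $ ceph -s                                                           # opens a connection like the one traced above
    $ grep 'assign_global_id' testdir/osd-crush/mon.a.log | tail -3     # one new global_id per client connection
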
2016-03-08 15:19:04.569187 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.569191 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).write_ack 1 2016-03-08 15:19:04.569198 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.569209 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer sleeping 2016-03-08 15:19:04.572050 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..24) commit_start 25 2016-03-08 15:19:04.572114 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.572120 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.572125 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.572126 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.572127 7f82a5352700 20 allow all 2016-03-08 15:19:04.572139 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd15573c0 2016-03-08 15:19:04.572143 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 25) v1 ==== 0+0+0 (0 0 0) 0x559cd1554fc0 con 0x559cd1477080 2016-03-08 15:19:04.572163 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.572165 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.572168 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.572169 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.572169 7f82a5352700 20 allow all 2016-03-08 15:19:04.572176 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1554fc0 2016-03-08 15:19:04.572179 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 26) v1 ==== 0+0+0 (0 0 0) 0x559cd15546c0 con 0x559cd1477080 2016-03-08 15:19:04.572188 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.572189 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.572192 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.572192 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.572193 7f82a5352700 20 allow all 2016-03-08 15:19:04.572200 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd15546c0 2016-03-08 15:19:04.572204 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.? 127.0.0.1:0/2303057837 1 ==== auth(proto 0 30 bytes epoch 0) v1 ==== 60+0+0 (900162395 0 0) 0x559cd17cb680 con 0x559cd1978300 2016-03-08 15:19:04.572224 7f82a5352700 10 mon.a@0(leader) e1 _ms_dispatch new session 0x559cd1857600 MonSession(client.? 127.0.0.1:0/2303057837 is open) 2016-03-08 15:19:04.572226 7f82a5352700 20 mon.a@0(leader) e1 caps 2016-03-08 15:19:04.572231 7f82a5352700 10 mon.a@0(leader).paxosservice(auth 1..3) dispatch 0x559cd17cb680 auth(proto 0 30 bytes epoch 0) v1 from client.? 
127.0.0.1:0/2303057837 con 0x559cd1978300 2016-03-08 15:19:04.572234 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 1..24) is_readable = 1 - now=2016-03-08 15:19:04.572235 lease_expire=0.000000 has v0 lc 24 2016-03-08 15:19:04.572239 7f82a5352700 10 mon.a@0(leader).auth v3 preprocess_query auth(proto 0 30 bytes epoch 0) v1 from client.? 127.0.0.1:0/2303057837 2016-03-08 15:19:04.572242 7f82a5352700 10 mon.a@0(leader).auth v3 prep_auth() blob_size=30 2016-03-08 15:19:04.572246 7f82a5352700 10 mon.a@0(leader).auth v3 AuthMonitor::assign_global_id m=auth(proto 0 30 bytes epoch 0) v1 mon=0/1 last_allocated=4103 max_global_id=14096 2016-03-08 15:19:04.572248 7f82a5352700 10 mon.a@0(leader).auth v3 next_global_id should be 4104 2016-03-08 15:19:04.572252 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2303057837 -- mon_map magic: 0 v1 -- ?+0 0x559cd15546c0 con 0x559cd1978300 2016-03-08 15:19:04.572256 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_map magic: 0 v1 remote, 127.0.0.1:0/2303057837, have pipe. 2016-03-08 15:19:04.572266 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.572270 7f82a5352700 2 mon.a@0(leader) e1 send_reply 0x559cd19d0240 0x559cd1560f00 auth_reply(proto 1 0 (0) Success) v1 2016-03-08 15:19:04.572277 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer encoding 1 features 576460752303423487 0x559cd15546c0 mon_map magic: 0 v1 2016-03-08 15:19:04.572277 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2303057837 -- auth_reply(proto 1 0 (0) Success) v1 -- ?+0 0x559cd1560f00 con 0x559cd1978300 2016-03-08 15:19:04.572293 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer signed seq # 1): sig = 0 2016-03-08 15:19:04.572299 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer sending 1 0x559cd15546c0 2016-03-08 15:19:04.572303 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message auth_reply(proto 1 0 (0) Success) v1 remote, 127.0.0.1:0/2303057837, have pipe. 
2016-03-08 15:19:04.572326 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 60 to dispatch throttler 60/104857600 2016-03-08 15:19:04.572327 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.572331 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd17cb680 2016-03-08 15:19:04.572333 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer encoding 2 features 576460752303423487 0x559cd1560f00 auth_reply(proto 1 0 (0) Success) v1 2016-03-08 15:19:04.572343 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer signed seq # 2): sig = 0 2016-03-08 15:19:04.572349 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer sending 2 0x559cd1560f00 2016-03-08 15:19:04.572364 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.572368 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer sleeping 2016-03-08 15:19:04.572458 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got ACK 2016-03-08 15:19:04.572472 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got ack seq 1 2016-03-08 15:19:04.572477 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader reading tag... 2016-03-08 15:19:04.572481 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got ACK 2016-03-08 15:19:04.572484 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got ack seq 2 2016-03-08 15:19:04.572486 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader reading tag... 2016-03-08 15:19:04.572488 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got MSG 2016-03-08 15:19:04.572491 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got envelope type=15 src client.? 
front=23 data=0 off 0 2016-03-08 15:19:04.572494 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader wants 23 bytes from policy throttler 0/104857600 2016-03-08 15:19:04.572497 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader wants 23 from dispatch throttler 0/104857600 2016-03-08 15:19:04.572501 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got front 23 2016-03-08 15:19:04.572503 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).aborted = 0 2016-03-08 15:19:04.572506 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got 23 + 0 + 0 byte message 2016-03-08 15:19:04.572514 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got message 2 0x559cd1859e00 mon_subscribe({monmap=0+}) v2 2016-03-08 15:19:04.572518 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1859e00 prio 127 2016-03-08 15:19:04.572523 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader reading tag... 2016-03-08 15:19:04.572527 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.572528 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.? 127.0.0.1:0/2303057837 2 ==== mon_subscribe({monmap=0+}) v2 ==== 23+0+0 (1620593354 0 0) 0x559cd1859e00 con 0x559cd1978300 2016-03-08 15:19:04.572538 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).write_ack 2 2016-03-08 15:19:04.572540 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got MSG 2016-03-08 15:19:04.572543 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got envelope type=15 src client.4104 front=23 data=0 off 0 2016-03-08 15:19:04.572546 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader wants 23 bytes from policy throttler 23/104857600 2016-03-08 15:19:04.572548 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader wants 23 from dispatch throttler 23/104857600 2016-03-08 15:19:04.572546 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.572551 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got front 23 2016-03-08 15:19:04.572553 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).aborted = 0 2016-03-08 15:19:04.572552 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 
c=0x559cd1978300).writer sleeping 2016-03-08 15:19:04.572555 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got 23 + 0 + 0 byte message 2016-03-08 15:19:04.572569 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got message 3 0x559cd19e8000 mon_subscribe({osdmap=0}) v2 2016-03-08 15:19:04.572569 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1857600 for client.? 127.0.0.1:0/2303057837 2016-03-08 15:19:04.572572 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd19e8000 prio 127 2016-03-08 15:19:04.572573 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.572576 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader reading tag... 2016-03-08 15:19:04.572579 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.572581 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.572582 7f82a5352700 20 allow all 2016-03-08 15:19:04.572579 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.572583 7f82a5352700 10 mon.a@0(leader) e1 handle_subscribe mon_subscribe({monmap=0+}) v2 2016-03-08 15:19:04.572585 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).write_ack 3 2016-03-08 15:19:04.572588 7f82a5352700 10 mon.a@0(leader) e1 check_sub monmap next 0 have 1 2016-03-08 15:19:04.572590 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.572593 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2303057837 -- mon_map magic: 0 v1 -- ?+0 0x559cd1554fc0 con 0x559cd1978300 2016-03-08 15:19:04.572594 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer sleeping 2016-03-08 15:19:04.572604 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_map magic: 0 v1 remote, 127.0.0.1:0/2303057837, have pipe. 2016-03-08 15:19:04.572618 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 23 to dispatch throttler 46/104857600 2016-03-08 15:19:04.572621 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1859e00 2016-03-08 15:19:04.572620 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.572626 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4104 127.0.0.1:0/2303057837 3 ==== mon_subscribe({osdmap=0}) v2 ==== 23+0+0 (319581110 0 0) 0x559cd19e8000 con 0x559cd1978300 2016-03-08 15:19:04.572627 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer encoding 3 features 576460752303423487 0x559cd1554fc0 mon_map magic: 0 v1 2016-03-08 15:19:04.572650 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1857600 for client.? 
127.0.0.1:0/2303057837 2016-03-08 15:19:04.572671 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.572676 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.572680 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.572634 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer signed seq # 3): sig = 0 2016-03-08 15:19:04.572681 7f82a5352700 20 allow all 2016-03-08 15:19:04.572682 7f82a5352700 10 mon.a@0(leader) e1 handle_subscribe mon_subscribe({osdmap=0}) v2 2016-03-08 15:19:04.572685 7f82a5352700 20 is_capable service=osd command= read on cap allow * 2016-03-08 15:19:04.572683 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer sending 3 0x559cd1554fc0 2016-03-08 15:19:04.572688 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.572689 7f82a5352700 20 allow all 2016-03-08 15:19:04.572690 7f82a5352700 10 mon.a@0(leader).osd e5 check_sub 0x559cd188c0a0 next 0 (onetime) 2016-03-08 15:19:04.572695 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2303057837 -- osd_map(5..5 src has 1..5) v3 -- ?+0 0x559cd17cb680 con 0x559cd1978300 2016-03-08 15:19:04.572701 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message osd_map(5..5 src has 1..5) v3 remote, 127.0.0.1:0/2303057837, have pipe. 2016-03-08 15:19:04.572715 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 23 to dispatch throttler 23/104857600 2016-03-08 15:19:04.572721 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd19e8000 2016-03-08 15:19:04.572721 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.572728 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer encoding 4 features 576460752303423487 0x559cd17cb680 osd_map(5..5 src has 1..5) v3 2016-03-08 15:19:04.572750 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer signed seq # 4): sig = 0 2016-03-08 15:19:04.572757 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer sending 4 0x559cd17cb680 2016-03-08 15:19:04.572780 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.572785 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer sleeping 2016-03-08 15:19:04.575763 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got ACK 2016-03-08 15:19:04.575774 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got ack seq 3 2016-03-08 15:19:04.575787 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader reading tag... 
2016-03-08 15:19:04.575790 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got ACK 2016-03-08 15:19:04.575792 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got ack seq 4 2016-03-08 15:19:04.575794 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader reading tag... 2016-03-08 15:19:04.575796 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got MSG 2016-03-08 15:19:04.575799 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got envelope type=50 src client.4104 front=80 data=0 off 0 2016-03-08 15:19:04.575802 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader wants 80 bytes from policy throttler 0/104857600 2016-03-08 15:19:04.575805 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader wants 80 from dispatch throttler 0/104857600 2016-03-08 15:19:04.575810 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got front 80 2016-03-08 15:19:04.575813 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).aborted = 0 2016-03-08 15:19:04.575815 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got 80 + 0 + 0 byte message 2016-03-08 15:19:04.575822 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got message 4 0x559cd1555200 mon_command({"prefix": "get_command_descriptions"} v 0) v1 2016-03-08 15:19:04.575827 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1555200 prio 127 2016-03-08 15:19:04.575832 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader reading tag... 2016-03-08 15:19:04.575835 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.575837 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4104 127.0.0.1:0/2303057837 4 ==== mon_command({"prefix": "get_command_descriptions"} v 0) v1 ==== 80+0+0 (3134233619 0 0) 0x559cd1555200 con 0x559cd1978300 2016-03-08 15:19:04.575850 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).write_ack 4 2016-03-08 15:19:04.575859 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.575863 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer sleeping 2016-03-08 15:19:04.575877 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1857600 for client.? 
127.0.0.1:0/2303057837 2016-03-08 15:19:04.575881 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.576930 7f82a5352700 2 mon.a@0(leader) e1 send_reply 0x559cd19d0c60 0x559cd15573c0 mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 2016-03-08 15:19:04.576933 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2303057837 -- mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 -- ?+37345 0x559cd15573c0 con 0x559cd1978300 2016-03-08 15:19:04.576936 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 remote, 127.0.0.1:0/2303057837, have pipe. 2016-03-08 15:19:04.576942 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.576953 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 80 to dispatch throttler 80/104857600 2016-03-08 15:19:04.576956 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1555200 2016-03-08 15:19:04.576955 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer encoding 5 features 576460752303423487 0x559cd15573c0 mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 2016-03-08 15:19:04.576966 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer signed seq # 5): sig = 0 2016-03-08 15:19:04.576969 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer sending 5 0x559cd15573c0 2016-03-08 15:19:04.576991 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.576995 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer sleeping 2016-03-08 15:19:04.578527 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..24) commit_finish 25 2016-03-08 15:19:04.578558 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:04.578580 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..6) refresh 2016-03-08 15:19:04.578595 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:04.578607 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..6) refresh 2016-03-08 15:19:04.578610 7f82a6b55700 15 mon.a@0(leader).osd e5 update_from_paxos paxos e 6, my e 5 2016-03-08 15:19:04.578626 7f82a6b55700 7 mon.a@0(leader).osd e5 update_from_paxos applying incremental 6 2016-03-08 15:19:04.578669 7f82a6b55700 1 mon.a@0(leader).osd e6 e6: 1 osds: 1 up, 1 in 2016-03-08 15:19:04.585201 7f82a6b55700 5 mon.a@0(leader).paxos(paxos refresh c 1..25) is_readable = 0 - now=2016-03-08 15:19:04.585203 lease_expire=0.000000 has v0 lc 25 2016-03-08 15:19:04.585211 7f82a6b55700 10 mon.a@0(leader).pg v6 check_osd_map -- osdmap not readable, waiting 2016-03-08 15:19:04.585214 7f82a6b55700 10 mon.a@0(leader).osd e6 check_subs 2016-03-08 15:19:04.585219 7f82a6b55700 10 mon.a@0(leader).osd e6 committed, telling random osd.0 127.0.0.1:6800/4256 all about it 2016-03-08 15:19:04.585224 7f82a6b55700 10 mon.a@0(leader).osd e6 build_incremental [5..6] 2016-03-08 15:19:04.585230 7f82a6b55700 20 mon.a@0(leader).osd e6 build_incremental inc 6 780 
bytes 2016-03-08 15:19:04.585232 7f82a6b55700 20 mon.a@0(leader).osd e6 build_incremental inc 5 571 bytes 2016-03-08 15:19:04.585234 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:6800/4256 -- osd_map(5..6 src has 1..6) v3 -- ?+0 0x559cd1563980 con 0x559cd1978180 2016-03-08 15:19:04.585240 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message osd_map(5..6 src has 1..6) v3 remote, 127.0.0.1:6800/4256, have pipe. 2016-03-08 15:19:04.585247 7f82a6b55700 10 mon.a@0(leader).osd e6 update_logger 2016-03-08 15:19:04.585263 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.585272 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..13) refresh 2016-03-08 15:19:04.585275 7f82a6b55700 10 mon.a@0(leader).log v13 update_from_paxos 2016-03-08 15:19:04.585274 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer encoding 7 features 576460752303423487 0x559cd1563980 osd_map(5..6 src has 1..6) v3 2016-03-08 15:19:04.585277 7f82a6b55700 10 mon.a@0(leader).log v13 update_from_paxos version 13 summary v 13 2016-03-08 15:19:04.585283 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer signed seq # 7): sig = 0 2016-03-08 15:19:04.585286 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sending 7 0x559cd1563980 2016-03-08 15:19:04.585290 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:04.585300 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) refresh 2016-03-08 15:19:04.585302 7f82a6b55700 10 mon.a@0(leader).auth v3 update_from_paxos 2016-03-08 15:19:04.585303 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..6) post_refresh 2016-03-08 15:19:04.585305 7f82a6b55700 10 mon.a@0(leader).pg v6 post_paxos_update 2016-03-08 15:19:04.585306 7f82a6b55700 10 mon.a@0(leader).pg v6 check_subs 2016-03-08 15:19:04.585309 7f82a6b55700 20 mon.a@0(leader).pg v6 check_subs .. 
osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.585313 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:04.585313 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.585314 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..6) post_refresh 2016-03-08 15:19:04.585316 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..13) post_refresh 2016-03-08 15:19:04.585315 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sleeping 2016-03-08 15:19:04.585317 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:04.585318 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) post_refresh 2016-03-08 15:19:04.585320 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..25) commit_proposal 2016-03-08 15:19:04.585322 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..6) _active - not active 2016-03-08 15:19:04.585324 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..25) finish_round 2016-03-08 15:19:04.585326 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..25) finish_round waiting_for_acting 2016-03-08 15:19:04.585327 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..6) _active 2016-03-08 15:19:04.585329 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..6) remove_legacy_versions 2016-03-08 15:19:04.585335 7f82a6b55700 7 mon.a@0(leader).paxosservice(osdmap 1..6) _active creating new pending 2016-03-08 15:19:04.585340 7f82a6b55700 10 mon.a@0(leader).osd e6 create_pending e 7 2016-03-08 15:19:04.585366 7f82a6b55700 7 mon.a@0(leader).osd e6 _booted osd.0 127.0.0.1:6800/4256 w 0 from 0 2016-03-08 15:19:04.585380 7f82a6b55700 0 log_channel(cluster) log [INF] : osd.0 127.0.0.1:6800/4256 boot 2016-03-08 15:19:04.585394 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 27 at 2016-03-08 15:19:04.585383) v1 -- ?+0 0x559cd1563700 con 0x559cd1477080 2016-03-08 15:19:04.585400 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 27 at 2016-03-08 15:19:04.585383) v1 local 2016-03-08 15:19:04.585419 7f82a6b55700 5 mon.a@0(leader).osd e6 send_latest to osd.0 127.0.0.1:6800/4256 start 1 2016-03-08 15:19:04.585436 7f82a6b55700 5 mon.a@0(leader).osd e6 send_incremental [1..6] to osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.585440 7f82a6b55700 10 mon.a@0(leader).osd e6 send_incrementalosd.0 127.0.0.1:6800/4256 should already have epoch 4 2016-03-08 15:19:04.585442 7f82a6b55700 10 mon.a@0(leader).osd e6 build_incremental [5..6] 2016-03-08 15:19:04.585445 7f82a6b55700 20 mon.a@0(leader).osd e6 build_incremental inc 6 780 bytes 2016-03-08 15:19:04.585447 7f82a6b55700 20 mon.a@0(leader).osd e6 build_incremental inc 5 571 bytes 2016-03-08 15:19:04.585440 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 27 at 2016-03-08 15:19:04.585383) v1 ==== 0+0+0 (0 0 0) 0x559cd1563700 con 0x559cd1477080 2016-03-08 15:19:04.585460 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd1999cc0 0x559cd1563480 osd_map(5..6 src has 1..6) v3 2016-03-08 15:19:04.585463 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:6800/4256 -- osd_map(5..6 src has 1..6) v3 -- ?+0 0x559cd1563480 con 0x559cd1978180 2016-03-08 15:19:04.585467 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message osd_map(5..6 src has 1..6) v3 remote, 127.0.0.1:6800/4256, have 
pipe. 2016-03-08 15:19:04.585487 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.585495 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..25) is_readable = 1 - now=2016-03-08 15:19:04.585495 lease_expire=0.000000 has v0 lc 25 2016-03-08 15:19:04.585496 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer encoding 8 features 576460752303423487 0x559cd1563480 osd_map(5..6 src has 1..6) v3 2016-03-08 15:19:04.585499 7f82a6b55700 10 mon.a@0(leader).pg v6 check_osd_map applying osdmap e6 to pg_map 2016-03-08 15:19:04.585503 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer signed seq # 8): sig = 0 2016-03-08 15:19:04.585505 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sending 8 0x559cd1563480 2016-03-08 15:19:04.585508 7f82a6b55700 10 mon.a@0(leader).pg v6 map_pg_creates to 4 pgs, osdmap epoch 6 2016-03-08 15:19:04.585528 7f82a6b55700 20 mon.a@0(leader).pg v6 map_pg_creates 1.0 acting_primary: -1 -> 0 acting: [] -> [0] up_primary: -1 -> 0 up: [] -> [0] 2016-03-08 15:19:04.585531 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.585533 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sleeping 2016-03-08 15:19:04.585536 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got ACK 2016-03-08 15:19:04.585543 7f82a324d700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got ack seq 7 2016-03-08 15:19:04.585548 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader reading tag... 
2016-03-08 15:19:04.585563 7f82a6b55700 20 mon.a@0(leader).pg v6 map_pg_creates 1.1 acting_primary: -1 -> 0 acting: [] -> [0] up_primary: -1 -> 0 up: [] -> [0] 2016-03-08 15:19:04.585578 7f82a6b55700 20 mon.a@0(leader).pg v6 map_pg_creates 1.2 acting_primary: -1 -> 0 acting: [] -> [0] up_primary: -1 -> 0 up: [] -> [0] 2016-03-08 15:19:04.585591 7f82a6b55700 20 mon.a@0(leader).pg v6 map_pg_creates 1.3 acting_primary: -1 -> 0 acting: [] -> [0] up_primary: -1 -> 0 up: [] -> [0] 2016-03-08 15:19:04.585594 7f82a6b55700 10 mon.a@0(leader).pg v6 map_pg_creates 4 pgs changed primary 2016-03-08 15:19:04.585596 7f82a6b55700 10 mon.a@0(leader).pg v6 register_new_pgs checking pg pools for osdmap epoch 6, last_pg_scan 5 2016-03-08 15:19:04.585597 7f82a6b55700 10 mon.a@0(leader).pg v6 no change in pool 1 replicated size 3 min_size 2 crush_ruleset 0 object_hash rjenkins pg_num 4 pgp_num 4 last_change 3 flags hashpspool stripe_width 0 2016-03-08 15:19:04.585602 7f82a6b55700 10 mon.a@0(leader).pg v6 register_new_pgs registered 0 new pgs, removed 0 uncreated pgs 2016-03-08 15:19:04.585604 7f82a6b55700 10 mon.a@0(leader).pg v6 check_down_pgs 2016-03-08 15:19:04.585606 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..6) propose_pending 2016-03-08 15:19:04.585609 7f82a6b55700 10 mon.a@0(leader).pg v6 encode_pending v 7 2016-03-08 15:19:04.585633 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..25) queue_pending_finisher 0x559cd14945c0 2016-03-08 15:19:04.585637 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..25) trigger_propose active, proposing now 2016-03-08 15:19:04.585646 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..25) propose_pending 26 2842 bytes 2016-03-08 15:19:04.585648 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..25) begin for 26 2842 bytes 2016-03-08 15:19:04.592449 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..25) commit_start 26 2016-03-08 15:19:04.592487 7f82a6b55700 10 mon.a@0(leader).osd e6 update_logger 2016-03-08 15:19:04.592499 7f82a6b55700 0 log_channel(cluster) log [INF] : osdmap e6: 1 osds: 1 up, 1 in 2016-03-08 15:19:04.592516 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 28 at 2016-03-08 15:19:04.592501) v1 -- ?+0 0x559cd1563200 con 0x559cd1477080 2016-03-08 15:19:04.592523 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 28 at 2016-03-08 15:19:04.592501) v1 local 2016-03-08 15:19:04.592531 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..25) finish_round waiting_for_readable 2016-03-08 15:19:04.592533 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..25) finish_round waiting_for_writeable 2016-03-08 15:19:04.592535 7f82a6b55700 10 mon.a@0(leader).paxos(paxos writing c 1..25) finish_round done w/ waiters, state 4 2016-03-08 15:19:04.592582 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.592589 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.592602 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..13) dispatch 0x559cd1563700 log(1 entries from seq 27 at 2016-03-08 15:19:04.585383) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:04.592610 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 1..25) is_readable = 1 - now=2016-03-08 15:19:04.592611 lease_expire=0.000000 has v0 lc 25 2016-03-08 15:19:04.592622 7f82a5352700 10 mon.a@0(leader).log v13 preprocess_query log(1 entries from seq 27 at 2016-03-08 15:19:04.585383) v1 from mon.0 
127.0.0.1:7104/0 2016-03-08 15:19:04.592633 7f82a5352700 10 mon.a@0(leader).log v13 preprocess_log log(1 entries from seq 27 at 2016-03-08 15:19:04.585383) v1 from mon.0 2016-03-08 15:19:04.592638 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:04.592641 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.592642 7f82a5352700 20 allow all 2016-03-08 15:19:04.592654 7f82a5352700 10 mon.a@0(leader).log v13 prepare_update log(1 entries from seq 27 at 2016-03-08 15:19:04.585383) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.592671 7f82a5352700 10 mon.a@0(leader).log v13 prepare_log log(1 entries from seq 27 at 2016-03-08 15:19:04.585383) v1 from mon.0 2016-03-08 15:19:04.592685 7f82a5352700 10 mon.a@0(leader).log v13 logging 2016-03-08 15:19:04.585383 mon.0 127.0.0.1:7104/0 27 : cluster [INF] osd.0 127.0.0.1:6800/4256 boot 2016-03-08 15:19:04.592701 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..13) setting proposal_timer 0x559cd14945e0 with delay of 0.0858319 2016-03-08 15:19:04.592715 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1563700 2016-03-08 15:19:04.592722 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 28 at 2016-03-08 15:19:04.592501) v1 ==== 0+0+0 (0 0 0) 0x559cd1563200 con 0x559cd1477080 2016-03-08 15:19:04.592762 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.592766 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.592777 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..13) dispatch 0x559cd1563200 log(1 entries from seq 28 at 2016-03-08 15:19:04.592501) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:04.592783 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 1..25) is_readable = 1 - now=2016-03-08 15:19:04.592784 lease_expire=0.000000 has v0 lc 25 2016-03-08 15:19:04.592791 7f82a5352700 10 mon.a@0(leader).log v13 preprocess_query log(1 entries from seq 28 at 2016-03-08 15:19:04.592501) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.592808 7f82a5352700 10 mon.a@0(leader).log v13 preprocess_log log(1 entries from seq 28 at 2016-03-08 15:19:04.592501) v1 from mon.0 2016-03-08 15:19:04.592813 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:04.592815 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.592815 7f82a5352700 20 allow all 2016-03-08 15:19:04.592823 7f82a5352700 10 mon.a@0(leader).log v13 prepare_update log(1 entries from seq 28 at 2016-03-08 15:19:04.592501) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.592838 7f82a5352700 10 mon.a@0(leader).log v13 prepare_log log(1 entries from seq 28 at 2016-03-08 15:19:04.592501) v1 from mon.0 2016-03-08 15:19:04.592843 7f82a5352700 10 mon.a@0(leader).log v13 logging 2016-03-08 15:19:04.592501 mon.0 127.0.0.1:7104/0 28 : cluster [INF] osdmap e6: 1 osds: 1 up, 1 in 2016-03-08 15:19:04.592857 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..13) proposal_timer already set 2016-03-08 15:19:04.592860 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1563200 2016-03-08 15:19:04.605445 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..25) commit_finish 26 2016-03-08 15:19:04.605477 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:04.605495 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..7) refresh 2016-03-08 15:19:04.605499 7f82a6b55700 10 mon.a@0(leader).pg v6 
update_from_paxos read_incremental 2016-03-08 15:19:04.605512 7f82a6b55700 20 mon.a@0(leader).pg v6 refreshing pg 1.0 got 0 len 541 2016-03-08 15:19:04.605532 7f82a6b55700 20 mon.a@0(leader).pg v6 refreshing pg 1.1 got 0 len 541 2016-03-08 15:19:04.605541 7f82a6b55700 20 mon.a@0(leader).pg v6 refreshing pg 1.2 got 0 len 541 2016-03-08 15:19:04.605551 7f82a6b55700 20 mon.a@0(leader).pg v6 refreshing pg 1.3 got 0 len 541 2016-03-08 15:19:04.605559 7f82a6b55700 10 mon.a@0(leader).pg v7 read_pgmap_meta 2016-03-08 15:19:04.605577 7f82a6b55700 10 mon.a@0(leader).pg v7 update_logger 2016-03-08 15:19:04.605590 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:04.605607 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..6) refresh 2016-03-08 15:19:04.605621 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..13) refresh 2016-03-08 15:19:04.605624 7f82a6b55700 10 mon.a@0(leader).log v13 update_from_paxos 2016-03-08 15:19:04.605625 7f82a6b55700 10 mon.a@0(leader).log v13 update_from_paxos version 13 summary v 13 2016-03-08 15:19:04.605637 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:04.605649 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) refresh 2016-03-08 15:19:04.605651 7f82a6b55700 10 mon.a@0(leader).auth v3 update_from_paxos 2016-03-08 15:19:04.605653 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..7) post_refresh 2016-03-08 15:19:04.605655 7f82a6b55700 10 mon.a@0(leader).pg v7 post_paxos_update 2016-03-08 15:19:04.605656 7f82a6b55700 10 mon.a@0(leader).pg v7 check_subs 2016-03-08 15:19:04.605658 7f82a6b55700 20 mon.a@0(leader).pg v7 check_subs .. osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.605662 7f82a6b55700 20 mon.a@0(leader).pg v7 send_pg_creates osd.0 from 0 : epoch 6 4 pgs 2016-03-08 15:19:04.605668 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:6800/4256 -- osd_pg_create(e6 1.0:3 1.1:3 1.2:3 1.3:3) v3 -- ?+0 0x559cd1556640 con 0x559cd1978180 2016-03-08 15:19:04.605673 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message osd_pg_create(e6 1.0:3 1.1:3 1.2:3 1.3:3) v3 remote, 127.0.0.1:6800/4256, have pipe. 
2016-03-08 15:19:04.605681 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:04.605682 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..6) post_refresh 2016-03-08 15:19:04.605683 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..13) post_refresh 2016-03-08 15:19:04.605684 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:04.605685 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) post_refresh 2016-03-08 15:19:04.605687 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..26) commit_proposal 2016-03-08 15:19:04.605689 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..7) _active - not active 2016-03-08 15:19:04.605692 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..26) finish_round 2016-03-08 15:19:04.605693 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..26) finish_round waiting_for_acting 2016-03-08 15:19:04.605695 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..7) _active 2016-03-08 15:19:04.605696 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..7) remove_legacy_versions 2016-03-08 15:19:04.605697 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.605703 7f82a6b55700 7 mon.a@0(leader).paxosservice(pgmap 1..7) _active creating new pending 2016-03-08 15:19:04.605706 7f82a6b55700 10 mon.a@0(leader).pg v7 create_pending v 8 2016-03-08 15:19:04.605708 7f82a6b55700 10 mon.a@0(leader).pg v7 check_osd_map already seen 6 >= 6 2016-03-08 15:19:04.605707 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer encoding 9 features 576460752303423487 0x559cd1556640 osd_pg_create(e6 1.0:3 1.1:3 1.2:3 1.3:3) v3 2016-03-08 15:19:04.605709 7f82a6b55700 10 mon.a@0(leader).pg v7 update_logger 2016-03-08 15:19:04.605716 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer signed seq # 9): sig = 0 2016-03-08 15:19:04.605718 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sending 9 0x559cd1556640 2016-03-08 15:19:04.605721 7f82a6b55700 0 log_channel(cluster) log [INF] : pgmap v7: 4 pgs: 4 creating; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:04.605734 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 29 at 2016-03-08 15:19:04.605722) v1 -- ?+0 0x559cd1562f80 con 0x559cd1477080 2016-03-08 15:19:04.605741 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 29 at 2016-03-08 15:19:04.605722) v1 local 2016-03-08 15:19:04.605748 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..26) finish_round waiting_for_readable 2016-03-08 15:19:04.605750 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..26) finish_round waiting_for_writeable 2016-03-08 15:19:04.605751 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..26) finish_round done w/ waiters, state 1 2016-03-08 15:19:04.605759 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.605764 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sleeping 2016-03-08 15:19:04.605767 
7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got ACK 2016-03-08 15:19:04.605779 7f82a324d700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got ack seq 8 2016-03-08 15:19:04.605779 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 29 at 2016-03-08 15:19:04.605722) v1 ==== 0+0+0 (0 0 0) 0x559cd1562f80 con 0x559cd1477080 2016-03-08 15:19:04.605785 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader reading tag... 2016-03-08 15:19:04.605810 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.605815 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.605825 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..13) dispatch 0x559cd1562f80 log(1 entries from seq 29 at 2016-03-08 15:19:04.605722) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:04.605832 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..26) is_readable = 1 - now=2016-03-08 15:19:04.605832 lease_expire=0.000000 has v0 lc 26 2016-03-08 15:19:04.605848 7f82a5352700 10 mon.a@0(leader).log v13 preprocess_query log(1 entries from seq 29 at 2016-03-08 15:19:04.605722) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.605860 7f82a5352700 10 mon.a@0(leader).log v13 preprocess_log log(1 entries from seq 29 at 2016-03-08 15:19:04.605722) v1 from mon.0 2016-03-08 15:19:04.605865 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:04.605867 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.605868 7f82a5352700 20 allow all 2016-03-08 15:19:04.605882 7f82a5352700 10 mon.a@0(leader).log v13 prepare_update log(1 entries from seq 29 at 2016-03-08 15:19:04.605722) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.605895 7f82a5352700 10 mon.a@0(leader).log v13 prepare_log log(1 entries from seq 29 at 2016-03-08 15:19:04.605722) v1 from mon.0 2016-03-08 15:19:04.605899 7f82a5352700 10 mon.a@0(leader).log v13 logging 2016-03-08 15:19:04.605722 mon.0 127.0.0.1:7104/0 29 : cluster [INF] pgmap v7: 4 pgs: 4 creating; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:04.605913 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..13) proposal_timer already set 2016-03-08 15:19:04.605915 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1562f80 2016-03-08 15:19:04.625287 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got ACK 2016-03-08 15:19:04.625303 7f82a324d700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got ack seq 9 2016-03-08 15:19:04.625316 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader reading tag... 
2016-03-08 15:19:04.625318 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got MSG
2016-03-08 15:19:04.625321 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got envelope type=73 src osd.0 front=22 data=0 off 0
2016-03-08 15:19:04.625325 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader wants 22 bytes from policy throttler 0/419430400
2016-03-08 15:19:04.625328 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader wants 22 from dispatch throttler 0/104857600
2016-03-08 15:19:04.625333 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got front 22
2016-03-08 15:19:04.625335 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).aborted = 0
2016-03-08 15:19:04.625337 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got 22 + 0 + 0 byte message
2016-03-08 15:19:04.625348 7f82a324d700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader got message 8 0x559cd1857c00 osd_alive(want up_thru 6 have 6) v1
2016-03-08 15:19:04.625352 7f82a324d700 20 -- 127.0.0.1:7104/0 queue 0x559cd1857c00 prio 127
2016-03-08 15:19:04.625357 7f82a324d700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).reader reading tag...
2016-03-08 15:19:04.625364 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.625365 7f82a5352700 1 -- 127.0.0.1:7104/0 <== osd.0 127.0.0.1:6800/4256 8 ==== osd_alive(want up_thru 6 have 6) v1 ==== 22+0+0 (720929769 0 0) 0x559cd1857c00 con 0x559cd1978180 2016-03-08 15:19:04.625375 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).write_ack 8 2016-03-08 15:19:04.625381 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.625383 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sleeping 2016-03-08 15:19:04.625391 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1858a00 for osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.625395 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.625401 7f82a5352700 10 mon.a@0(leader).paxosservice(osdmap 1..6) dispatch 0x559cd1857c00 osd_alive(want up_thru 6 have 6) v1 from osd.0 127.0.0.1:6800/4256 con 0x559cd1978180 2016-03-08 15:19:04.625406 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..26) is_readable = 1 - now=2016-03-08 15:19:04.625406 lease_expire=0.000000 has v0 lc 26 2016-03-08 15:19:04.625418 7f82a5352700 10 mon.a@0(leader).osd e6 preprocess_query osd_alive(want up_thru 6 have 6) v1 from osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.625425 7f82a5352700 20 is_capable service=osd command= exec on cap allow * 2016-03-08 15:19:04.625427 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.625428 7f82a5352700 20 allow all 2016-03-08 15:19:04.625429 7f82a5352700 10 mon.a@0(leader).osd e6 preprocess_alive want up_thru 6 from osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.625434 7f82a5352700 7 mon.a@0(leader).osd e6 prepare_update osd_alive(want up_thru 6 have 6) v1 from osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.625437 7f82a5352700 7 mon.a@0(leader).osd e6 prepare_alive want up_thru 6 have 6 from osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.625445 7f82a5352700 10 mon.a@0(leader).osd e6 should_propose 2016-03-08 15:19:04.625446 7f82a5352700 10 mon.a@0(leader).paxosservice(osdmap 1..6) setting proposal_timer 0x559cd1494660 with delay of 0.0800042 2016-03-08 15:19:04.625454 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 22 to dispatch throttler 22/104857600 2016-03-08 15:19:04.625458 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1857c00 2016-03-08 15:19:04.627400 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got ACK 2016-03-08 15:19:04.627411 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got ack seq 5 2016-03-08 15:19:04.627413 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader reading tag... 
2016-03-08 15:19:04.627415 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got MSG 2016-03-08 15:19:04.627417 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got envelope type=50 src client.4104 front=64 data=0 off 0 2016-03-08 15:19:04.627420 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader wants 64 bytes from policy throttler 0/104857600 2016-03-08 15:19:04.627423 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader wants 64 from dispatch throttler 0/104857600 2016-03-08 15:19:04.627426 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got front 64 2016-03-08 15:19:04.627428 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).aborted = 0 2016-03-08 15:19:04.627430 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got 64 + 0 + 0 byte message 2016-03-08 15:19:04.627438 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader got message 5 0x559cd1557cc0 mon_command({"prefix": "osd dump"} v 0) v1 2016-03-08 15:19:04.627441 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1557cc0 prio 127 2016-03-08 15:19:04.627445 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader reading tag... 2016-03-08 15:19:04.627450 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4104 127.0.0.1:0/2303057837 5 ==== mon_command({"prefix": "osd dump"} v 0) v1 ==== 64+0+0 (2082783643 0 0) 0x559cd1557cc0 con 0x559cd1978300 2016-03-08 15:19:04.627450 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.627461 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).write_ack 5 2016-03-08 15:19:04.627470 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.627472 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer sleeping 2016-03-08 15:19:04.627476 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd1857600 for client.? 
127.0.0.1:0/2303057837 2016-03-08 15:19:04.627480 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.627505 7f82a5352700 0 mon.a@0(leader) e1 handle_command mon_command({"prefix": "osd dump"} v 0) v1 2016-03-08 15:19:04.627515 7f82a5352700 20 is_capable service=osd command=osd dump read on cap allow * 2016-03-08 15:19:04.627517 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.627518 7f82a5352700 20 allow all 2016-03-08 15:19:04.627519 7f82a5352700 10 mon.a@0(leader) e1 _allowed_command capable 2016-03-08 15:19:04.627524 7f82a5352700 0 log_channel(audit) log [DBG] : from='client.? 127.0.0.1:0/2303057837' entity='client.admin' cmd=[{"prefix": "osd dump"}]: dispatch 2016-03-08 15:19:04.627535 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 30 at 2016-03-08 15:19:04.627526) v1 -- ?+0 0x559cd17cdc00 con 0x559cd1477080 2016-03-08 15:19:04.627540 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 30 at 2016-03-08 15:19:04.627526) v1 local 2016-03-08 15:19:04.627549 7f82a5352700 10 mon.a@0(leader).paxosservice(osdmap 1..6) dispatch 0x559cd1557cc0 mon_command({"prefix": "osd dump"} v 0) v1 from client.4104 127.0.0.1:0/2303057837 con 0x559cd1978300 2016-03-08 15:19:04.627553 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..26) is_readable = 1 - now=2016-03-08 15:19:04.627553 lease_expire=0.000000 has v0 lc 26 2016-03-08 15:19:04.627558 7f82a5352700 10 mon.a@0(leader).osd e6 preprocess_query mon_command({"prefix": "osd dump"} v 0) v1 from client.4104 127.0.0.1:0/2303057837 2016-03-08 15:19:04.627604 7f82a5352700 2 mon.a@0(leader) e1 send_reply 0x559cd19d3960 0x559cd1555200 mon_command_ack([{"prefix": "osd dump"}]=0 v6) v1 2016-03-08 15:19:04.627614 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/2303057837 -- mon_command_ack([{"prefix": "osd dump"}]=0 v6) v1 -- ?+500 0x559cd1555200 con 0x559cd1978300 2016-03-08 15:19:04.627618 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_command_ack([{"prefix": "osd dump"}]=0 v6) v1 remote, 127.0.0.1:0/2303057837, have pipe. 
2016-03-08 15:19:04.627630 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 64 to dispatch throttler 64/104857600 2016-03-08 15:19:04.627632 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1557cc0 2016-03-08 15:19:04.627637 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 30 at 2016-03-08 15:19:04.627526) v1 ==== 0+0+0 (0 0 0) 0x559cd17cdc00 con 0x559cd1477080 2016-03-08 15:19:04.627642 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.627652 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer encoding 6 features 576460752303423487 0x559cd1555200 mon_command_ack([{"prefix": "osd dump"}]=0 v6) v1 2016-03-08 15:19:04.627660 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.627662 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.627663 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer signed seq # 6): sig = 0 2016-03-08 15:19:04.627666 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer sending 6 0x559cd1555200 2016-03-08 15:19:04.627667 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..13) dispatch 0x559cd17cdc00 log(1 entries from seq 30 at 2016-03-08 15:19:04.627526) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:04.627671 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..26) is_readable = 1 - now=2016-03-08 15:19:04.627671 lease_expire=0.000000 has v0 lc 26 2016-03-08 15:19:04.627682 7f82a5352700 10 mon.a@0(leader).log v13 preprocess_query log(1 entries from seq 30 at 2016-03-08 15:19:04.627526) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.627687 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer: state = open policy.server=1 2016-03-08 15:19:04.627690 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).writer sleeping 2016-03-08 15:19:04.627696 7f82a5352700 10 mon.a@0(leader).log v13 preprocess_log log(1 entries from seq 30 at 2016-03-08 15:19:04.627526) v1 from mon.0 2016-03-08 15:19:04.627701 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:04.627702 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.627703 7f82a5352700 20 allow all 2016-03-08 15:19:04.627711 7f82a5352700 10 mon.a@0(leader).log v13 prepare_update log(1 entries from seq 30 at 2016-03-08 15:19:04.627526) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.627716 7f82a5352700 10 mon.a@0(leader).log v13 prepare_log log(1 entries from seq 30 at 2016-03-08 15:19:04.627526) v1 from mon.0 2016-03-08 15:19:04.627718 7f82a5352700 10 mon.a@0(leader).log v13 logging 2016-03-08 15:19:04.627526 mon.0 127.0.0.1:7104/0 30 : audit [DBG] from='client.? 
127.0.0.1:0/2303057837' entity='client.admin' cmd=[{"prefix": "osd dump"}]: dispatch 2016-03-08 15:19:04.627730 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..13) proposal_timer already set 2016-03-08 15:19:04.627732 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd17cdc00 2016-03-08 15:19:04.629273 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).reader couldn't read tag, (0) Success 2016-03-08 15:19:04.629298 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).fault (0) Success 2016-03-08 15:19:04.629327 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).fault on lossy channel, failing 2016-03-08 15:19:04.629332 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978300).stop 2016-03-08 15:19:04.629339 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978300).unregister_pipe 2016-03-08 15:19:04.629344 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978300).discard_queue 2016-03-08 15:19:04.629353 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978300).reader done 2016-03-08 15:19:04.629356 7f82a5352700 10 mon.a@0(leader) e1 ms_handle_reset 0x559cd1978300 127.0.0.1:0/2303057837 2016-03-08 15:19:04.629355 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978300).writer finishing 2016-03-08 15:19:04.629361 7f82a5352700 10 mon.a@0(leader) e1 reset/close on session client.? 127.0.0.1:0/2303057837 2016-03-08 15:19:04.629364 7f82a5352700 10 mon.a@0(leader) e1 remove_session 0x559cd1857600 client.? 
127.0.0.1:0/2303057837 2016-03-08 15:19:04.629365 7f82a334e700 10 -- 127.0.0.1:7104/0 queue_reap 0x559cd1976800 2016-03-08 15:19:04.629374 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978300).writer done 2016-03-08 15:19:04.629380 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper 2016-03-08 15:19:04.629386 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper reaping pipe 0x559cd1976800 127.0.0.1:0/2303057837 2016-03-08 15:19:04.629390 7f82a6354700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978300).discard_queue 2016-03-08 15:19:04.629394 7f82a6354700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978300).unregister_pipe - not registered 2016-03-08 15:19:04.629399 7f82a6354700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/2303057837 pipe(0x559cd1976800 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978300).join 2016-03-08 15:19:04.629410 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper reaped pipe 0x559cd1976800 127.0.0.1:0/2303057837 2016-03-08 15:19:04.629417 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper deleted pipe 0x559cd1976800 2016-03-08 15:19:04.629420 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper done 2016-03-08 15:19:04.678620 7f82a5b53700 10 mon.a@0(leader).paxosservice(logm 1..13) propose_pending 2016-03-08 15:19:04.678637 7f82a5b53700 10 mon.a@0(leader).log v13 encode_full log v 13 2016-03-08 15:19:04.678674 7f82a5b53700 10 mon.a@0(leader).log v13 encode_pending v14 2016-03-08 15:19:04.678682 7f82a5b53700 5 mon.a@0(leader).paxos(paxos active c 1..26) queue_pending_finisher 0x559cd14945d0 2016-03-08 15:19:04.678685 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..26) trigger_propose active, proposing now 2016-03-08 15:19:04.678689 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..26) propose_pending 27 8628 bytes 2016-03-08 15:19:04.678691 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..26) begin for 27 8628 bytes 2016-03-08 15:19:04.686025 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..26) commit_start 27 2016-03-08 15:19:04.692463 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..26) commit_finish 27 2016-03-08 15:19:04.692508 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:04.692534 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..7) refresh 2016-03-08 15:19:04.692547 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:04.692558 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..6) refresh 2016-03-08 15:19:04.692570 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..14) refresh 2016-03-08 15:19:04.692573 7f82a6b55700 10 mon.a@0(leader).log v14 update_from_paxos 2016-03-08 15:19:04.692575 7f82a6b55700 10 mon.a@0(leader).log v14 update_from_paxos version 14 summary v 13 2016-03-08 15:19:04.692582 7f82a6b55700 10 mon.a@0(leader).log v14 update_from_paxos latest full 13 2016-03-08 15:19:04.692592 7f82a6b55700 7 mon.a@0(leader).log v14 update_from_paxos applying incremental log 14 2016-03-08 15:19:04.585383 mon.0 127.0.0.1:7104/0 27 : cluster [INF] osd.0 127.0.0.1:6800/4256 boot 2016-03-08 15:19:04.692608 7f82a6b55700 20 mon.a@0(leader).log v14 update_from_paxos logging for channel 'cluster' to file 'testdir/osd-crush/log' 2016-03-08 15:19:04.692620 7f82a6b55700 7 mon.a@0(leader).log v14 update_from_paxos applying incremental log 14 2016-03-08 15:19:04.592501 mon.0 127.0.0.1:7104/0 28 : cluster [INF] osdmap e6: 1 
osds: 1 up, 1 in 2016-03-08 15:19:04.692626 7f82a6b55700 20 mon.a@0(leader).log v14 update_from_paxos logging for channel 'cluster' to file 'testdir/osd-crush/log' 2016-03-08 15:19:04.692632 7f82a6b55700 7 mon.a@0(leader).log v14 update_from_paxos applying incremental log 14 2016-03-08 15:19:04.605722 mon.0 127.0.0.1:7104/0 29 : cluster [INF] pgmap v7: 4 pgs: 4 creating; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:04.692638 7f82a6b55700 20 mon.a@0(leader).log v14 update_from_paxos logging for channel 'cluster' to file 'testdir/osd-crush/log' 2016-03-08 15:19:04.692646 7f82a6b55700 7 mon.a@0(leader).log v14 update_from_paxos applying incremental log 14 2016-03-08 15:19:04.627526 mon.0 127.0.0.1:7104/0 30 : audit [DBG] from='client.? 127.0.0.1:0/2303057837' entity='client.admin' cmd=[{"prefix": "osd dump"}]: dispatch 2016-03-08 15:19:04.692652 7f82a6b55700 20 mon.a@0(leader).log v14 update_from_paxos logging for channel 'audit' to file 'testdir/osd-crush/log' 2016-03-08 15:19:04.692654 7f82a6b55700 15 mon.a@0(leader).log v14 update_from_paxos logging for 1 channels 2016-03-08 15:19:04.692656 7f82a6b55700 15 mon.a@0(leader).log v14 update_from_paxos channel 'cluster' logging 340 bytes 2016-03-08 15:19:04.692672 7f82a6b55700 10 mon.a@0(leader).log v14 check_subs 2016-03-08 15:19:04.692688 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:04.692699 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) refresh 2016-03-08 15:19:04.692702 7f82a6b55700 10 mon.a@0(leader).auth v3 update_from_paxos 2016-03-08 15:19:04.692704 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..7) post_refresh 2016-03-08 15:19:04.692706 7f82a6b55700 10 mon.a@0(leader).pg v7 post_paxos_update 2016-03-08 15:19:04.692708 7f82a6b55700 10 mon.a@0(leader).pg v7 check_subs 2016-03-08 15:19:04.692710 7f82a6b55700 20 mon.a@0(leader).pg v7 check_subs .. 
osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.692714 7f82a6b55700 20 mon.a@0(leader).pg v7 send_pg_creates osd.0 from 7 has nothing to send 2016-03-08 15:19:04.692715 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:04.692716 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..6) post_refresh 2016-03-08 15:19:04.692717 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..14) post_refresh 2016-03-08 15:19:04.692718 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:04.692719 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) post_refresh 2016-03-08 15:19:04.692721 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..27) commit_proposal 2016-03-08 15:19:04.692723 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..14) _active - not active 2016-03-08 15:19:04.692726 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..27) finish_round 2016-03-08 15:19:04.692727 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..27) finish_round waiting_for_acting 2016-03-08 15:19:04.692729 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..14) _active 2016-03-08 15:19:04.692730 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..14) remove_legacy_versions 2016-03-08 15:19:04.692737 7f82a6b55700 7 mon.a@0(leader).paxosservice(logm 1..14) _active creating new pending 2016-03-08 15:19:04.692746 7f82a6b55700 10 mon.a@0(leader).log v14 create_pending v 15 2016-03-08 15:19:04.692756 7f82a6b55700 7 mon.a@0(leader).log v14 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.692766 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd19d0ea0 0x559cd1557180 log(last 27) v1 2016-03-08 15:19:04.692770 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 27) v1 -- ?+0 0x559cd1557180 con 0x559cd1477080 2016-03-08 15:19:04.692774 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 27) v1 local 2016-03-08 15:19:04.692803 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 27) v1 ==== 0+0+0 (0 0 0) 0x559cd1557180 con 0x559cd1477080 2016-03-08 15:19:04.692812 7f82a6b55700 7 mon.a@0(leader).log v14 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.692831 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd19d1560 0x559cd1555680 log(last 28) v1 2016-03-08 15:19:04.692834 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 28) v1 -- ?+0 0x559cd1555680 con 0x559cd1477080 2016-03-08 15:19:04.692838 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 28) v1 local 2016-03-08 15:19:04.692856 7f82a6b55700 7 mon.a@0(leader).log v14 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.692867 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd19d1b00 0x559cd15558c0 log(last 29) v1 2016-03-08 15:19:04.692870 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 29) v1 -- ?+0 0x559cd15558c0 con 0x559cd1477080 2016-03-08 15:19:04.692873 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 29) v1 local 2016-03-08 15:19:04.692904 7f82a6b55700 7 mon.a@0(leader).log v14 _updated_log for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.692915 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd19d3cc0 0x559cd1555d40 log(last 30) v1 2016-03-08 15:19:04.692918 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(last 30) v1 -- ?+0 0x559cd1555d40 con 0x559cd1477080 2016-03-08 15:19:04.692921 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(last 30) v1 local 2016-03-08 15:19:04.692956 7f82a6b55700 20 
mon.a@0(leader).paxos(paxos active c 1..27) finish_round waiting_for_readable 2016-03-08 15:19:04.692958 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..27) finish_round waiting_for_writeable 2016-03-08 15:19:04.692960 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..27) finish_round done w/ waiters, state 1 2016-03-08 15:19:04.692996 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.693002 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.693007 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.693010 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.693012 7f82a5352700 20 allow all 2016-03-08 15:19:04.693028 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1557180 2016-03-08 15:19:04.693042 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 28) v1 ==== 0+0+0 (0 0 0) 0x559cd1555680 con 0x559cd1477080 2016-03-08 15:19:04.693065 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.693069 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.693074 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.693076 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.693083 7f82a5352700 20 allow all 2016-03-08 15:19:04.693099 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1555680 2016-03-08 15:19:04.693103 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 29) v1 ==== 0+0+0 (0 0 0) 0x559cd15558c0 con 0x559cd1477080 2016-03-08 15:19:04.693125 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.693129 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.693133 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.693134 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.693135 7f82a5352700 20 allow all 2016-03-08 15:19:04.693146 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd15558c0 2016-03-08 15:19:04.693150 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(last 30) v1 ==== 0+0+0 (0 0 0) 0x559cd1555d40 con 0x559cd1477080 2016-03-08 15:19:04.693177 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.693181 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.693186 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.693188 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.693188 7f82a5352700 20 allow all 2016-03-08 15:19:04.693203 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1555d40 2016-03-08 15:19:04.705531 7f82a5b53700 10 mon.a@0(leader).paxosservice(osdmap 1..6) propose_pending 2016-03-08 15:19:04.705539 7f82a5b53700 10 mon.a@0(leader).osd e6 encode_pending e 7 2016-03-08 15:19:04.705582 7f82a5b53700 20 mon.a@0(leader).osd e6 full_crc 3076050385 inc_crc 725001832 2016-03-08 15:19:04.705589 7f82a5b53700 5 mon.a@0(leader).paxos(paxos active c 1..27) queue_pending_finisher 0x559cd1494540 2016-03-08 15:19:04.705592 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..27) trigger_propose active, proposing now 2016-03-08 15:19:04.705595 7f82a5b53700 10 
mon.a@0(leader).paxos(paxos active c 1..27) propose_pending 28 1819 bytes 2016-03-08 15:19:04.705596 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..27) begin for 28 1819 bytes 2016-03-08 15:19:04.711871 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..27) commit_start 28 2016-03-08 15:19:04.718761 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..27) commit_finish 28 2016-03-08 15:19:04.718789 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:04.718814 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..7) refresh 2016-03-08 15:19:04.718827 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:04.718837 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..7) refresh 2016-03-08 15:19:04.718839 7f82a6b55700 15 mon.a@0(leader).osd e6 update_from_paxos paxos e 7, my e 6 2016-03-08 15:19:04.718852 7f82a6b55700 7 mon.a@0(leader).osd e6 update_from_paxos applying incremental 7 2016-03-08 15:19:04.718879 7f82a6b55700 1 mon.a@0(leader).osd e7 e7: 1 osds: 1 up, 1 in 2016-03-08 15:19:04.724963 7f82a6b55700 5 mon.a@0(leader).paxos(paxos refresh c 1..28) is_readable = 0 - now=2016-03-08 15:19:04.724967 lease_expire=0.000000 has v0 lc 28 2016-03-08 15:19:04.724982 7f82a6b55700 10 mon.a@0(leader).pg v7 check_osd_map -- osdmap not readable, waiting 2016-03-08 15:19:04.724985 7f82a6b55700 10 mon.a@0(leader).osd e7 check_subs 2016-03-08 15:19:04.724990 7f82a6b55700 10 mon.a@0(leader).osd e7 committed, telling random osd.0 127.0.0.1:6800/4256 all about it 2016-03-08 15:19:04.724996 7f82a6b55700 10 mon.a@0(leader).osd e7 build_incremental [6..7] 2016-03-08 15:19:04.725000 7f82a6b55700 20 mon.a@0(leader).osd e7 build_incremental inc 7 182 bytes 2016-03-08 15:19:04.725004 7f82a6b55700 20 mon.a@0(leader).osd e7 build_incremental inc 6 780 bytes 2016-03-08 15:19:04.725007 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:6800/4256 -- osd_map(6..7 src has 1..7) v3 -- ?+0 0x559cd17cdc00 con 0x559cd1978180 2016-03-08 15:19:04.725015 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message osd_map(6..7 src has 1..7) v3 remote, 127.0.0.1:6800/4256, have pipe. 
2016-03-08 15:19:04.725024 7f82a6b55700 10 mon.a@0(leader).osd e7 update_logger 2016-03-08 15:19:04.725043 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.725057 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer encoding 10 features 576460752303423487 0x559cd17cdc00 osd_map(6..7 src has 1..7) v3 2016-03-08 15:19:04.725066 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..14) refresh 2016-03-08 15:19:04.725070 7f82a6b55700 10 mon.a@0(leader).log v14 update_from_paxos 2016-03-08 15:19:04.725070 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer signed seq # 10): sig = 0 2016-03-08 15:19:04.725072 7f82a6b55700 10 mon.a@0(leader).log v14 update_from_paxos version 14 summary v 14 2016-03-08 15:19:04.725075 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sending 10 0x559cd17cdc00 2016-03-08 15:19:04.725089 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:04.725102 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) refresh 2016-03-08 15:19:04.725107 7f82a6b55700 10 mon.a@0(leader).auth v3 update_from_paxos 2016-03-08 15:19:04.725109 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..7) post_refresh 2016-03-08 15:19:04.725111 7f82a6b55700 10 mon.a@0(leader).pg v7 post_paxos_update 2016-03-08 15:19:04.725115 7f82a6b55700 10 mon.a@0(leader).pg v7 check_subs 2016-03-08 15:19:04.725117 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.725118 7f82a6b55700 20 mon.a@0(leader).pg v7 check_subs .. 
osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.725120 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sleeping 2016-03-08 15:19:04.725122 7f82a6b55700 20 mon.a@0(leader).pg v7 send_pg_creates osd.0 from 7 has nothing to send 2016-03-08 15:19:04.725124 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:04.725125 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..7) post_refresh 2016-03-08 15:19:04.725127 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..14) post_refresh 2016-03-08 15:19:04.725128 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:04.725129 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) post_refresh 2016-03-08 15:19:04.725132 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..28) commit_proposal 2016-03-08 15:19:04.725135 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..7) _active - not active 2016-03-08 15:19:04.725137 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..28) finish_round 2016-03-08 15:19:04.725139 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..28) finish_round waiting_for_acting 2016-03-08 15:19:04.725141 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..7) _active 2016-03-08 15:19:04.725142 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..7) remove_legacy_versions 2016-03-08 15:19:04.725151 7f82a6b55700 7 mon.a@0(leader).paxosservice(osdmap 1..7) _active creating new pending 2016-03-08 15:19:04.725155 7f82a6b55700 10 mon.a@0(leader).osd e7 create_pending e 8 2016-03-08 15:19:04.725186 7f82a6b55700 7 mon.a@0(leader).osd e7 _reply_map 6 from osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.725194 7f82a6b55700 5 mon.a@0(leader).osd e7 send_latest to osd.0 127.0.0.1:6800/4256 start 6 2016-03-08 15:19:04.725201 7f82a6b55700 5 mon.a@0(leader).osd e7 send_incremental [6..7] to osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.725205 7f82a6b55700 10 mon.a@0(leader).osd e7 send_incrementalosd.0 127.0.0.1:6800/4256 should already have epoch 6 2016-03-08 15:19:04.725208 7f82a6b55700 10 mon.a@0(leader).osd e7 build_incremental [7..7] 2016-03-08 15:19:04.725210 7f82a6b55700 20 mon.a@0(leader).osd e7 build_incremental inc 7 182 bytes 2016-03-08 15:19:04.725225 7f82a6b55700 2 mon.a@0(leader) e1 send_reply 0x559cd19d34e0 0x559cd1562f80 osd_map(7..7 src has 1..7) v3 2016-03-08 15:19:04.725228 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:6800/4256 -- osd_map(7..7 src has 1..7) v3 -- ?+0 0x559cd1562f80 con 0x559cd1978180 2016-03-08 15:19:04.725234 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message osd_map(7..7 src has 1..7) v3 remote, 127.0.0.1:6800/4256, have pipe. 
2016-03-08 15:19:04.725244 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.725257 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer encoding 11 features 576460752303423487 0x559cd1562f80 osd_map(7..7 src has 1..7) v3 2016-03-08 15:19:04.725259 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..28) is_readable = 1 - now=2016-03-08 15:19:04.725260 lease_expire=0.000000 has v0 lc 28 2016-03-08 15:19:04.725265 7f82a6b55700 10 mon.a@0(leader).pg v7 check_osd_map applying osdmap e7 to pg_map 2016-03-08 15:19:04.725268 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer signed seq # 11): sig = 0 2016-03-08 15:19:04.725275 7f82a6b55700 10 mon.a@0(leader).pg v7 map_pg_creates to 4 pgs, osdmap epoch 7 2016-03-08 15:19:04.725273 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sending 11 0x559cd1562f80 2016-03-08 15:19:04.725298 7f82a314c700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer: state = open policy.server=1 2016-03-08 15:19:04.725305 7f82a314c700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:6800/4256 pipe(0x559cd1975400 sd=22 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978180).writer sleeping 2016-03-08 15:19:04.725333 7f82a6b55700 10 mon.a@0(leader).pg v7 register_new_pgs checking pg pools for osdmap epoch 7, last_pg_scan 6 2016-03-08 15:19:04.725337 7f82a6b55700 10 mon.a@0(leader).pg v7 no change in pool 1 replicated size 3 min_size 2 crush_ruleset 0 object_hash rjenkins pg_num 4 pgp_num 4 last_change 3 flags hashpspool stripe_width 0 2016-03-08 15:19:04.725343 7f82a6b55700 10 mon.a@0(leader).pg v7 register_new_pgs registered 0 new pgs, removed 0 uncreated pgs 2016-03-08 15:19:04.725345 7f82a6b55700 10 mon.a@0(leader).pg v7 check_down_pgs 2016-03-08 15:19:04.725348 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..7) propose_pending 2016-03-08 15:19:04.725350 7f82a6b55700 10 mon.a@0(leader).pg v7 encode_pending v 8 2016-03-08 15:19:04.725367 7f82a6b55700 5 mon.a@0(leader).paxos(paxos active c 1..28) queue_pending_finisher 0x559cd1494650 2016-03-08 15:19:04.725371 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..28) trigger_propose active, proposing now 2016-03-08 15:19:04.725381 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..28) propose_pending 29 474 bytes 2016-03-08 15:19:04.725384 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..28) begin for 29 474 bytes 2016-03-08 15:19:04.731592 7f82a6b55700 10 mon.a@0(leader).paxos(paxos updating c 1..28) commit_start 29 2016-03-08 15:19:04.731634 7f82a6b55700 10 mon.a@0(leader).osd e7 update_logger 2016-03-08 15:19:04.731643 7f82a6b55700 0 log_channel(cluster) log [INF] : osdmap e7: 1 osds: 1 up, 1 in 2016-03-08 15:19:04.731661 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 31 at 2016-03-08 15:19:04.731646) v1 -- ?+0 0x559cd1563200 con 0x559cd1477080 2016-03-08 15:19:04.731668 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 31 at 2016-03-08 15:19:04.731646) v1 local 2016-03-08 15:19:04.731676 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..28) finish_round waiting_for_readable 2016-03-08 15:19:04.731678 
7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..28) finish_round waiting_for_writeable 2016-03-08 15:19:04.731679 7f82a6b55700 10 mon.a@0(leader).paxos(paxos writing c 1..28) finish_round done w/ waiters, state 4 2016-03-08 15:19:04.731731 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 31 at 2016-03-08 15:19:04.731646) v1 ==== 0+0+0 (0 0 0) 0x559cd1563200 con 0x559cd1477080 2016-03-08 15:19:04.731760 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.731762 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.731781 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..14) dispatch 0x559cd1563200 log(1 entries from seq 31 at 2016-03-08 15:19:04.731646) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:04.731789 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 1..28) is_readable = 1 - now=2016-03-08 15:19:04.731789 lease_expire=0.000000 has v0 lc 28 2016-03-08 15:19:04.731799 7f82a5352700 10 mon.a@0(leader).log v14 preprocess_query log(1 entries from seq 31 at 2016-03-08 15:19:04.731646) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.731808 7f82a5352700 10 mon.a@0(leader).log v14 preprocess_log log(1 entries from seq 31 at 2016-03-08 15:19:04.731646) v1 from mon.0 2016-03-08 15:19:04.731811 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:04.731812 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.731813 7f82a5352700 20 allow all 2016-03-08 15:19:04.731820 7f82a5352700 10 mon.a@0(leader).log v14 prepare_update log(1 entries from seq 31 at 2016-03-08 15:19:04.731646) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.731825 7f82a5352700 10 mon.a@0(leader).log v14 prepare_log log(1 entries from seq 31 at 2016-03-08 15:19:04.731646) v1 from mon.0 2016-03-08 15:19:04.731827 7f82a5352700 10 mon.a@0(leader).log v14 logging 2016-03-08 15:19:04.731646 mon.0 127.0.0.1:7104/0 31 : cluster [INF] osdmap e7: 1 osds: 1 up, 1 in 2016-03-08 15:19:04.731838 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..14) setting proposal_timer 0x559cd1494670 with delay of 0.0869279 2016-03-08 15:19:04.731849 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1563200 2016-03-08 15:19:04.740163 7f82a4350700 20 accepter.accepter poll got 1 2016-03-08 15:19:04.740172 7f82a4350700 10 accepter.pfd.revents=1 2016-03-08 15:19:04.740178 7f82a4350700 10 accepter.accepted incoming on sd 21 2016-03-08 15:19:04.740204 7f82a4350700 20 accepter.accepter calling poll 2016-03-08 15:19:04.740213 7f82aea26700 10 -- 127.0.0.1:7104/0 >> :/0 pipe(0x559cd1a26000 sd=21 :0 s=0 pgs=0 cs=0 l=0 c=0x559cd1978480).accept 2016-03-08 15:19:04.740261 7f82aea26700 1 -- 127.0.0.1:7104/0 >> :/0 pipe(0x559cd1a26000 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd1978480).accept sd=21 127.0.0.1:52234/0 2016-03-08 15:19:04.740326 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd1978480).accept peer addr is 127.0.0.1:0/1181108721 2016-03-08 15:19:04.740333 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=0 pgs=0 cs=0 l=0 c=0x559cd1978480).accept got peer connect_seq 0 global_seq 1 2016-03-08 15:19:04.740335 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1978480).accept of host_type 8, policy.lossy=1 policy.server=1 policy.standby=0 
policy.resetcheck=0 2016-03-08 15:19:04.740338 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1978480).accept my proto 15, their proto 15 2016-03-08 15:19:04.740344 7f82aea26700 10 mon.a@0(leader) e1 ms_verify_authorizer 127.0.0.1:0/1181108721 client protocol 0 2016-03-08 15:19:04.740348 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1978480).accept: setting up session_security. 2016-03-08 15:19:04.740350 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=0 pgs=0 cs=0 l=1 c=0x559cd1978480).accept new session 2016-03-08 15:19:04.740352 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).accept success, connect_seq = 1, sending READY 2016-03-08 15:19:04.740354 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).accept features 576460752303423487 2016-03-08 15:19:04.740361 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).register_pipe 2016-03-08 15:19:04.740376 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).discard_requeued_up_to 0 2016-03-08 15:19:04.740379 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).accept starting writer, state open 2016-03-08 15:19:04.740390 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).accept done 2016-03-08 15:19:04.740393 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader reading tag... 2016-03-08 15:19:04.740416 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open policy.server=1 2016-03-08 15:19:04.740426 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer sleeping 2016-03-08 15:19:04.740431 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got KEEPALIVE2 2016-03-08 15:19:04.740408 2016-03-08 15:19:04.740439 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader reading tag... 2016-03-08 15:19:04.740445 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open policy.server=1 2016-03-08 15:19:04.740449 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got MSG 2016-03-08 15:19:04.740452 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got envelope type=17 src client.? 
front=60 data=0 off 0 2016-03-08 15:19:04.740450 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).write_keepalive2 15 2016-03-08 15:19:04.740408 2016-03-08 15:19:04.740455 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader wants 60 bytes from policy throttler 0/104857600 2016-03-08 15:19:04.740459 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader wants 60 from dispatch throttler 0/104857600 2016-03-08 15:19:04.740463 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got front 60 2016-03-08 15:19:04.740465 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).aborted = 0 2016-03-08 15:19:04.740467 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got 60 + 0 + 0 byte message 2016-03-08 15:19:04.740471 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open policy.server=1 2016-03-08 15:19:04.740475 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer sleeping 2016-03-08 15:19:04.740482 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got message 1 0x559cd17cbe00 auth(proto 0 30 bytes epoch 0) v1 2016-03-08 15:19:04.740489 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd17cbe00 prio 127 2016-03-08 15:19:04.740496 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader reading tag... 2016-03-08 15:19:04.740517 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open policy.server=1 2016-03-08 15:19:04.740519 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.? 127.0.0.1:0/1181108721 1 ==== auth(proto 0 30 bytes epoch 0) v1 ==== 60+0+0 (900162395 0 0) 0x559cd17cbe00 con 0x559cd1978480 2016-03-08 15:19:04.740527 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).write_ack 1 2016-03-08 15:19:04.740533 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open policy.server=1 2016-03-08 15:19:04.740542 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer sleeping 2016-03-08 15:19:04.740550 7f82a5352700 10 mon.a@0(leader) e1 _ms_dispatch new session 0x559cd19e9600 MonSession(client.? 127.0.0.1:0/1181108721 is open) 2016-03-08 15:19:04.740554 7f82a5352700 20 mon.a@0(leader) e1 caps 2016-03-08 15:19:04.740559 7f82a5352700 10 mon.a@0(leader).paxosservice(auth 1..3) dispatch 0x559cd17cbe00 auth(proto 0 30 bytes epoch 0) v1 from client.? 
127.0.0.1:0/1181108721 con 0x559cd1978480 2016-03-08 15:19:04.740564 7f82a5352700 5 mon.a@0(leader).paxos(paxos writing c 1..28) is_readable = 1 - now=2016-03-08 15:19:04.740564 lease_expire=0.000000 has v0 lc 28 2016-03-08 15:19:04.740567 7f82a5352700 10 mon.a@0(leader).auth v3 preprocess_query auth(proto 0 30 bytes epoch 0) v1 from client.? 127.0.0.1:0/1181108721 2016-03-08 15:19:04.740571 7f82a5352700 10 mon.a@0(leader).auth v3 prep_auth() blob_size=30 2016-03-08 15:19:04.740576 7f82a5352700 10 mon.a@0(leader).auth v3 AuthMonitor::assign_global_id m=auth(proto 0 30 bytes epoch 0) v1 mon=0/1 last_allocated=4104 max_global_id=14096 2016-03-08 15:19:04.740578 7f82a5352700 10 mon.a@0(leader).auth v3 next_global_id should be 4105 2016-03-08 15:19:04.740584 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/1181108721 -- mon_map magic: 0 v1 -- ?+0 0x559cd1555d40 con 0x559cd1978480 2016-03-08 15:19:04.740588 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_map magic: 0 v1 remote, 127.0.0.1:0/1181108721, have pipe. 2016-03-08 15:19:04.740597 7f82a5352700 2 mon.a@0(leader) e1 send_reply 0x559cd1a0eea0 0x559cd17cd700 auth_reply(proto 1 0 (0) Success) v1 2016-03-08 15:19:04.740602 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/1181108721 -- auth_reply(proto 1 0 (0) Success) v1 -- ?+0 0x559cd17cd700 con 0x559cd1978480 2016-03-08 15:19:04.740605 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message auth_reply(proto 1 0 (0) Success) v1 remote, 127.0.0.1:0/1181108721, have pipe. 2016-03-08 15:19:04.740625 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 60 to dispatch throttler 60/104857600 2016-03-08 15:19:04.740622 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open policy.server=1 2016-03-08 15:19:04.740628 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd17cbe00 2016-03-08 15:19:04.740630 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer encoding 1 features 576460752303423487 0x559cd1555d40 mon_map magic: 0 v1 2016-03-08 15:19:04.740642 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer signed seq # 1): sig = 0 2016-03-08 15:19:04.740647 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer sending 1 0x559cd1555d40 2016-03-08 15:19:04.740665 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open policy.server=1 2016-03-08 15:19:04.740669 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer encoding 2 features 576460752303423487 0x559cd17cd700 auth_reply(proto 1 0 (0) Success) v1 2016-03-08 15:19:04.740681 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer signed seq # 2): sig = 0 2016-03-08 15:19:04.740692 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer sending 2 0x559cd17cd700 2016-03-08 15:19:04.740703 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open 
policy.server=1 2016-03-08 15:19:04.740707 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer sleeping 2016-03-08 15:19:04.740806 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got ACK 2016-03-08 15:19:04.740814 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got ack seq 1 2016-03-08 15:19:04.740816 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader reading tag... 2016-03-08 15:19:04.740818 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got ACK 2016-03-08 15:19:04.740820 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got ack seq 2 2016-03-08 15:19:04.740822 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader reading tag... 2016-03-08 15:19:04.740824 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got MSG 2016-03-08 15:19:04.740826 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got envelope type=15 src client.? front=23 data=0 off 0 2016-03-08 15:19:04.740829 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader wants 23 bytes from policy throttler 0/104857600 2016-03-08 15:19:04.740831 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader wants 23 from dispatch throttler 0/104857600 2016-03-08 15:19:04.740835 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got front 23 2016-03-08 15:19:04.740837 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).aborted = 0 2016-03-08 15:19:04.740839 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got 23 + 0 + 0 byte message 2016-03-08 15:19:04.740847 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got message 2 0x559cd19e9800 mon_subscribe({monmap=0+}) v2 2016-03-08 15:19:04.740851 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd19e9800 prio 127 2016-03-08 15:19:04.740856 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader reading tag... 2016-03-08 15:19:04.740858 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open policy.server=1 2016-03-08 15:19:04.740869 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got MSG 2016-03-08 15:19:04.740862 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.? 
127.0.0.1:0/1181108721 2 ==== mon_subscribe({monmap=0+}) v2 ==== 23+0+0 (1620593354 0 0) 0x559cd19e9800 con 0x559cd1978480 2016-03-08 15:19:04.740869 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).write_ack 2 2016-03-08 15:19:04.740872 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got envelope type=15 src client.4105 front=23 data=0 off 0 2016-03-08 15:19:04.740874 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader wants 23 bytes from policy throttler 23/104857600 2016-03-08 15:19:04.740878 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader wants 23 from dispatch throttler 23/104857600 2016-03-08 15:19:04.740878 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open policy.server=1 2016-03-08 15:19:04.740882 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got front 23 2016-03-08 15:19:04.740883 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer sleeping 2016-03-08 15:19:04.740886 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).aborted = 0 2016-03-08 15:19:04.740890 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got 23 + 0 + 0 byte message 2016-03-08 15:19:04.740895 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd19e9600 for client.? 127.0.0.1:0/1181108721 2016-03-08 15:19:04.740896 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got message 3 0x559cd19e9a00 mon_subscribe({osdmap=0}) v2 2016-03-08 15:19:04.740898 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.740899 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd19e9a00 prio 127 2016-03-08 15:19:04.740902 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.740902 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader reading tag... 
2016-03-08 15:19:04.740905 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.740905 7f82a5352700 20 allow all 2016-03-08 15:19:04.740907 7f82a5352700 10 mon.a@0(leader) e1 handle_subscribe mon_subscribe({monmap=0+}) v2 2016-03-08 15:19:04.740911 7f82a5352700 10 mon.a@0(leader) e1 check_sub monmap next 0 have 1 2016-03-08 15:19:04.740908 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open policy.server=1 2016-03-08 15:19:04.740914 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/1181108721 -- mon_map magic: 0 v1 -- ?+0 0x559cd15558c0 con 0x559cd1978480 2016-03-08 15:19:04.740914 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).write_ack 3 2016-03-08 15:19:04.740917 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_map magic: 0 v1 remote, 127.0.0.1:0/1181108721, have pipe. 2016-03-08 15:19:04.740922 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer encoding 3 features 576460752303423487 0x559cd15558c0 mon_map magic: 0 v1 2016-03-08 15:19:04.740927 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 23 to dispatch throttler 46/104857600 2016-03-08 15:19:04.740929 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd19e9800 2016-03-08 15:19:04.740929 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer signed seq # 3): sig = 0 2016-03-08 15:19:04.740932 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4105 127.0.0.1:0/1181108721 3 ==== mon_subscribe({osdmap=0}) v2 ==== 23+0+0 (319581110 0 0) 0x559cd19e9a00 con 0x559cd1978480 2016-03-08 15:19:04.740935 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer sending 3 0x559cd15558c0 2016-03-08 15:19:04.740947 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd19e9600 for client.? 
127.0.0.1:0/1181108721 2016-03-08 15:19:04.740962 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.740966 7f82a5352700 20 is_capable service=mon command= read on cap allow * 2016-03-08 15:19:04.740967 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.740968 7f82a5352700 20 allow all 2016-03-08 15:19:04.740969 7f82a5352700 10 mon.a@0(leader) e1 handle_subscribe mon_subscribe({osdmap=0}) v2 2016-03-08 15:19:04.740972 7f82a5352700 20 is_capable service=osd command= read on cap allow * 2016-03-08 15:19:04.740975 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.740975 7f82a5352700 20 allow all 2016-03-08 15:19:04.740977 7f82a5352700 10 mon.a@0(leader).osd e7 check_sub 0x559cd188c640 next 0 (onetime) 2016-03-08 15:19:04.740976 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open policy.server=1 2016-03-08 15:19:04.740980 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer sleeping 2016-03-08 15:19:04.740982 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/1181108721 -- osd_map(7..7 src has 1..7) v3 -- ?+0 0x559cd17cbe00 con 0x559cd1978480 2016-03-08 15:19:04.740986 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message osd_map(7..7 src has 1..7) v3 remote, 127.0.0.1:0/1181108721, have pipe. 2016-03-08 15:19:04.740994 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open policy.server=1 2016-03-08 15:19:04.740997 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer encoding 4 features 576460752303423487 0x559cd17cbe00 osd_map(7..7 src has 1..7) v3 2016-03-08 15:19:04.741003 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 23 to dispatch throttler 23/104857600 2016-03-08 15:19:04.741005 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd19e9a00 2016-03-08 15:19:04.741005 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer signed seq # 4): sig = 0 2016-03-08 15:19:04.741008 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer sending 4 0x559cd17cbe00 2016-03-08 15:19:04.741030 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open policy.server=1 2016-03-08 15:19:04.741033 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer sleeping 2016-03-08 15:19:04.742966 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got ACK 2016-03-08 15:19:04.742980 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got ack seq 3 2016-03-08 15:19:04.742985 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader reading tag... 
2016-03-08 15:19:04.742989 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got ACK 2016-03-08 15:19:04.742994 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got ack seq 4 2016-03-08 15:19:04.742997 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader reading tag... 2016-03-08 15:19:04.743001 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got MSG 2016-03-08 15:19:04.743028 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got envelope type=50 src client.4105 front=80 data=0 off 0 2016-03-08 15:19:04.743046 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader wants 80 bytes from policy throttler 0/104857600 2016-03-08 15:19:04.743053 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader wants 80 from dispatch throttler 0/104857600 2016-03-08 15:19:04.743062 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got front 80 2016-03-08 15:19:04.743068 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).aborted = 0 2016-03-08 15:19:04.743073 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got 80 + 0 + 0 byte message 2016-03-08 15:19:04.743085 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got message 4 0x559cd1a3a480 mon_command({"prefix": "get_command_descriptions"} v 0) v1 2016-03-08 15:19:04.743093 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1a3a480 prio 127 2016-03-08 15:19:04.743106 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader reading tag... 2016-03-08 15:19:04.743109 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open policy.server=1 2016-03-08 15:19:04.743107 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4105 127.0.0.1:0/1181108721 4 ==== mon_command({"prefix": "get_command_descriptions"} v 0) v1 ==== 80+0+0 (3134233619 0 0) 0x559cd1a3a480 con 0x559cd1978480 2016-03-08 15:19:04.743120 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).write_ack 4 2016-03-08 15:19:04.743128 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open policy.server=1 2016-03-08 15:19:04.743133 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer sleeping 2016-03-08 15:19:04.743149 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd19e9600 for client.? 
127.0.0.1:0/1181108721 2016-03-08 15:19:04.743154 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.744263 7f82a5352700 2 mon.a@0(leader) e1 send_reply 0x559cd1a0f560 0x559cd1555680 mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 2016-03-08 15:19:04.744267 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/1181108721 -- mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 -- ?+37345 0x559cd1555680 con 0x559cd1978480 2016-03-08 15:19:04.744273 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 remote, 127.0.0.1:0/1181108721, have pipe. 2016-03-08 15:19:04.744284 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open policy.server=1 2016-03-08 15:19:04.744290 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer encoding 5 features 576460752303423487 0x559cd1555680 mon_command_ack([{"prefix": "get_command_descriptions"}]=0 v0) v1 2016-03-08 15:19:04.744295 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 80 to dispatch throttler 80/104857600 2016-03-08 15:19:04.744299 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1a3a480 2016-03-08 15:19:04.744299 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer signed seq # 5): sig = 0 2016-03-08 15:19:04.744303 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer sending 5 0x559cd1555680 2016-03-08 15:19:04.744328 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open policy.server=1 2016-03-08 15:19:04.744332 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer sleeping 2016-03-08 15:19:04.749321 7f82a6b55700 20 mon.a@0(leader).paxos(paxos writing c 1..28) commit_finish 29 2016-03-08 15:19:04.749354 7f82a6b55700 10 mon.a@0(leader) e1 refresh_from_paxos 2016-03-08 15:19:04.749370 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..8) refresh 2016-03-08 15:19:04.749372 7f82a6b55700 10 mon.a@0(leader).pg v7 update_from_paxos read_incremental 2016-03-08 15:19:04.749380 7f82a6b55700 10 mon.a@0(leader).pg v8 read_pgmap_meta 2016-03-08 15:19:04.749391 7f82a6b55700 10 mon.a@0(leader).pg v8 update_logger 2016-03-08 15:19:04.749399 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) refresh 2016-03-08 15:19:04.749423 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..7) refresh 2016-03-08 15:19:04.749433 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..14) refresh 2016-03-08 15:19:04.749435 7f82a6b55700 10 mon.a@0(leader).log v14 update_from_paxos 2016-03-08 15:19:04.749436 7f82a6b55700 10 mon.a@0(leader).log v14 update_from_paxos version 14 summary v 14 2016-03-08 15:19:04.749443 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) refresh 2016-03-08 15:19:04.749449 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) refresh 2016-03-08 15:19:04.749451 7f82a6b55700 10 mon.a@0(leader).auth v3 update_from_paxos 2016-03-08 15:19:04.749452 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..8) post_refresh 2016-03-08 15:19:04.749453 7f82a6b55700 10 mon.a@0(leader).pg 
v8 post_paxos_update 2016-03-08 15:19:04.749455 7f82a6b55700 10 mon.a@0(leader).pg v8 check_subs 2016-03-08 15:19:04.749456 7f82a6b55700 20 mon.a@0(leader).pg v8 check_subs .. osd.0 127.0.0.1:6800/4256 2016-03-08 15:19:04.749459 7f82a6b55700 20 mon.a@0(leader).pg v8 send_pg_creates osd.0 from 7 has nothing to send 2016-03-08 15:19:04.749460 7f82a6b55700 10 mon.a@0(leader).paxosservice(mdsmap 1..1) post_refresh 2016-03-08 15:19:04.749461 7f82a6b55700 10 mon.a@0(leader).paxosservice(osdmap 1..7) post_refresh 2016-03-08 15:19:04.749462 7f82a6b55700 10 mon.a@0(leader).paxosservice(logm 1..14) post_refresh 2016-03-08 15:19:04.749462 7f82a6b55700 10 mon.a@0(leader).paxosservice(monmap 1..1) post_refresh 2016-03-08 15:19:04.749463 7f82a6b55700 10 mon.a@0(leader).paxosservice(auth 1..3) post_refresh 2016-03-08 15:19:04.749464 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..29) commit_proposal 2016-03-08 15:19:04.749466 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..8) _active - not active 2016-03-08 15:19:04.749473 7f82a6b55700 10 mon.a@0(leader).paxos(paxos refresh c 1..29) finish_round 2016-03-08 15:19:04.749474 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..29) finish_round waiting_for_acting 2016-03-08 15:19:04.749476 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..8) _active 2016-03-08 15:19:04.749476 7f82a6b55700 10 mon.a@0(leader).paxosservice(pgmap 1..8) remove_legacy_versions 2016-03-08 15:19:04.749480 7f82a6b55700 7 mon.a@0(leader).paxosservice(pgmap 1..8) _active creating new pending 2016-03-08 15:19:04.749481 7f82a6b55700 10 mon.a@0(leader).pg v8 create_pending v 9 2016-03-08 15:19:04.749482 7f82a6b55700 10 mon.a@0(leader).pg v8 check_osd_map already seen 7 >= 7 2016-03-08 15:19:04.749483 7f82a6b55700 10 mon.a@0(leader).pg v8 update_logger 2016-03-08 15:19:04.749492 7f82a6b55700 0 log_channel(cluster) log [INF] : pgmap v8: 4 pgs: 4 creating; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:04.749503 7f82a6b55700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 32 at 2016-03-08 15:19:04.749493) v1 -- ?+0 0x559cd1563700 con 0x559cd1477080 2016-03-08 15:19:04.749508 7f82a6b55700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 32 at 2016-03-08 15:19:04.749493) v1 local 2016-03-08 15:19:04.749514 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..29) finish_round waiting_for_readable 2016-03-08 15:19:04.749515 7f82a6b55700 20 mon.a@0(leader).paxos(paxos active c 1..29) finish_round waiting_for_writeable 2016-03-08 15:19:04.749516 7f82a6b55700 10 mon.a@0(leader).paxos(paxos active c 1..29) finish_round done w/ waiters, state 1 2016-03-08 15:19:04.749536 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 32 at 2016-03-08 15:19:04.749493) v1 ==== 0+0+0 (0 0 0) 0x559cd1563700 con 0x559cd1477080 2016-03-08 15:19:04.749559 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.749562 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.749569 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..14) dispatch 0x559cd1563700 log(1 entries from seq 32 at 2016-03-08 15:19:04.749493) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:04.749573 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..29) is_readable = 1 - now=2016-03-08 15:19:04.749573 lease_expire=0.000000 has v0 lc 29 2016-03-08 15:19:04.749578 7f82a5352700 10 mon.a@0(leader).log v14 preprocess_query log(1 entries 
from seq 32 at 2016-03-08 15:19:04.749493) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.749584 7f82a5352700 10 mon.a@0(leader).log v14 preprocess_log log(1 entries from seq 32 at 2016-03-08 15:19:04.749493) v1 from mon.0 2016-03-08 15:19:04.749588 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:04.749589 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.749590 7f82a5352700 20 allow all 2016-03-08 15:19:04.749597 7f82a5352700 10 mon.a@0(leader).log v14 prepare_update log(1 entries from seq 32 at 2016-03-08 15:19:04.749493) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.749603 7f82a5352700 10 mon.a@0(leader).log v14 prepare_log log(1 entries from seq 32 at 2016-03-08 15:19:04.749493) v1 from mon.0 2016-03-08 15:19:04.749608 7f82a5352700 10 mon.a@0(leader).log v14 logging 2016-03-08 15:19:04.749493 mon.0 127.0.0.1:7104/0 32 : cluster [INF] pgmap v8: 4 pgs: 4 creating; 0 bytes data, 0 kB used, 0 kB / 0 kB avail 2016-03-08 15:19:04.749617 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..14) proposal_timer already set 2016-03-08 15:19:04.749618 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1563700 2016-03-08 15:19:04.791881 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got ACK 2016-03-08 15:19:04.791897 7f82aea26700 15 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got ack seq 5 2016-03-08 15:19:04.791901 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader reading tag... 2016-03-08 15:19:04.791904 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got MSG 2016-03-08 15:19:04.791908 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got envelope type=50 src client.4105 front=71 data=51 off 0 2016-03-08 15:19:04.791913 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader wants 122 bytes from policy throttler 0/104857600 2016-03-08 15:19:04.791918 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader wants 122 from dispatch throttler 0/104857600 2016-03-08 15:19:04.791926 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got front 71 2016-03-08 15:19:04.791930 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader allocating new rx buffer at offset 0 2016-03-08 15:19:04.791944 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader reading nonblocking into 0x559cd14a9720 len 51 2016-03-08 15:19:04.791963 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).aborted = 0 2016-03-08 15:19:04.791966 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got 71 + 0 + 51 byte message 2016-03-08 15:19:04.791990 
7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader got message 5 0x559cd1a3bb00 mon_command({"prefix": "osd setcrushmap"} v 0) v1 2016-03-08 15:19:04.792006 7f82aea26700 20 -- 127.0.0.1:7104/0 queue 0x559cd1a3bb00 prio 127 2016-03-08 15:19:04.792014 7f82aea26700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader reading tag... 2016-03-08 15:19:04.792016 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open policy.server=1 2016-03-08 15:19:04.792034 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).write_ack 5 2016-03-08 15:19:04.792041 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open policy.server=1 2016-03-08 15:19:04.792037 7f82a5352700 1 -- 127.0.0.1:7104/0 <== client.4105 127.0.0.1:0/1181108721 5 ==== mon_command({"prefix": "osd setcrushmap"} v 0) v1 ==== 71+0+51 (2955683621 0 2843684325) 0x559cd1a3bb00 con 0x559cd1978480 2016-03-08 15:19:04.792046 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer sleeping 2016-03-08 15:19:04.792073 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd19e9600 for client.? 127.0.0.1:0/1181108721 2016-03-08 15:19:04.792077 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.792105 7f82a5352700 0 mon.a@0(leader) e1 handle_command mon_command({"prefix": "osd setcrushmap"} v 0) v1 2016-03-08 15:19:04.792118 7f82a5352700 20 is_capable service=osd command=osd setcrushmap read write on cap allow * 2016-03-08 15:19:04.792121 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.792122 7f82a5352700 20 allow all 2016-03-08 15:19:04.792122 7f82a5352700 10 mon.a@0(leader) e1 _allowed_command capable 2016-03-08 15:19:04.792128 7f82a5352700 0 log_channel(audit) log [INF] : from='client.? 
127.0.0.1:0/1181108721' entity='client.admin' cmd=[{"prefix": "osd setcrushmap"}]: dispatch 2016-03-08 15:19:04.792140 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:7104/0 -- log(1 entries from seq 33 at 2016-03-08 15:19:04.792129) v1 -- ?+0 0x559cd17cd980 con 0x559cd1477080 2016-03-08 15:19:04.792146 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message log(1 entries from seq 33 at 2016-03-08 15:19:04.792129) v1 local 2016-03-08 15:19:04.792157 7f82a5352700 10 mon.a@0(leader).paxosservice(osdmap 1..7) dispatch 0x559cd1a3bb00 mon_command({"prefix": "osd setcrushmap"} v 0) v1 from client.4105 127.0.0.1:0/1181108721 con 0x559cd1978480 2016-03-08 15:19:04.792163 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..29) is_readable = 1 - now=2016-03-08 15:19:04.792164 lease_expire=0.000000 has v0 lc 29 2016-03-08 15:19:04.792175 7f82a5352700 10 mon.a@0(leader).osd e7 preprocess_query mon_command({"prefix": "osd setcrushmap"} v 0) v1 from client.4105 127.0.0.1:0/1181108721 2016-03-08 15:19:04.792197 7f82a5352700 7 mon.a@0(leader).osd e7 prepare_update mon_command({"prefix": "osd setcrushmap"} v 0) v1 from client.4105 127.0.0.1:0/1181108721 2016-03-08 15:19:04.792208 7f82a5352700 10 mon.a@0(leader).osd e7 prepare_command setting new crush map 2016-03-08 15:19:04.792232 7f82a5352700 10 mon.a@0(leader).osd e7 testing map 2016-03-08 15:19:04.800507 7f82a5352700 -1 mon.a@0(leader).osd e7 error on crush map: ï\ÑœU 2016-03-08 15:19:04.800545 7f82a5352700 2 mon.a@0(leader) e1 send_reply 0x559cd1a0fd40 0x559cd1557180 mon_command_ack([{"prefix": "osd setcrushmap"}]=-22 Failed crushmap test: ï\ÑœU v7) v1 2016-03-08 15:19:04.800549 7f82a5352700 1 -- 127.0.0.1:7104/0 --> 127.0.0.1:0/1181108721 -- mon_command_ack([{"prefix": "osd setcrushmap"}]=-22 Failed crushmap test: ï\ÑœU v7) v1 -- ?+0 0x559cd1557180 con 0x559cd1978480 2016-03-08 15:19:04.800554 7f82a5352700 20 -- 127.0.0.1:7104/0 submit_message mon_command_ack([{"prefix": "osd setcrushmap"}]=-22 Failed crushmap test: ï\ÑœU v7) v1 remote, 127.0.0.1:0/1181108721, have pipe. 
2016-03-08 15:19:04.800584 7f82a5352700 10 -- 127.0.0.1:7104/0 dispatch_throttle_release 122 to dispatch throttler 122/104857600 2016-03-08 15:19:04.800589 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd1a3bb00 2016-03-08 15:19:04.800584 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open policy.server=1 2016-03-08 15:19:04.800595 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer encoding 6 features 576460752303423487 0x559cd1557180 mon_command_ack([{"prefix": "osd setcrushmap"}]=-22 Failed crushmap test: ï\ÑœU v7) v1 2016-03-08 15:19:04.800598 7f82a5352700 1 -- 127.0.0.1:7104/0 <== mon.0 127.0.0.1:7104/0 0 ==== log(1 entries from seq 33 at 2016-03-08 15:19:04.792129) v1 ==== 0+0+0 (0 0 0) 0x559cd17cd980 con 0x559cd1477080 2016-03-08 15:19:04.800603 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer signed seq # 6): sig = 0 2016-03-08 15:19:04.800606 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer sending 6 0x559cd1557180 2016-03-08 15:19:04.800618 7f82a5352700 20 mon.a@0(leader) e1 _ms_dispatch existing session 0x559cd14d6a00 for mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.800621 7f82a5352700 20 mon.a@0(leader) e1 caps allow * 2016-03-08 15:19:04.800634 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..14) dispatch 0x559cd17cd980 log(1 entries from seq 33 at 2016-03-08 15:19:04.792129) v1 from mon.0 127.0.0.1:7104/0 con 0x559cd1477080 2016-03-08 15:19:04.800638 7f82a5352700 5 mon.a@0(leader).paxos(paxos active c 1..29) is_readable = 1 - now=2016-03-08 15:19:04.800639 lease_expire=0.000000 has v0 lc 29 2016-03-08 15:19:04.800641 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer: state = open policy.server=1 2016-03-08 15:19:04.800646 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).writer sleeping 2016-03-08 15:19:04.800646 7f82a5352700 10 mon.a@0(leader).log v14 preprocess_query log(1 entries from seq 33 at 2016-03-08 15:19:04.792129) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.800654 7f82a5352700 10 mon.a@0(leader).log v14 preprocess_log log(1 entries from seq 33 at 2016-03-08 15:19:04.792129) v1 from mon.0 2016-03-08 15:19:04.800658 7f82a5352700 20 is_capable service=log command= write on cap allow * 2016-03-08 15:19:04.800659 7f82a5352700 20 allow so far , doing grant allow * 2016-03-08 15:19:04.800660 7f82a5352700 20 allow all 2016-03-08 15:19:04.800669 7f82a5352700 10 mon.a@0(leader).log v14 prepare_update log(1 entries from seq 33 at 2016-03-08 15:19:04.792129) v1 from mon.0 127.0.0.1:7104/0 2016-03-08 15:19:04.800675 7f82a5352700 10 mon.a@0(leader).log v14 prepare_log log(1 entries from seq 33 at 2016-03-08 15:19:04.792129) v1 from mon.0 2016-03-08 15:19:04.800677 7f82a5352700 10 mon.a@0(leader).log v14 logging 2016-03-08 15:19:04.792129 mon.0 127.0.0.1:7104/0 33 : audit [INF] from='client.? 
127.0.0.1:0/1181108721' entity='client.admin' cmd=[{"prefix": "osd setcrushmap"}]: dispatch 2016-03-08 15:19:04.800689 7f82a5352700 10 mon.a@0(leader).paxosservice(logm 1..14) proposal_timer already set 2016-03-08 15:19:04.800690 7f82a5352700 20 -- 127.0.0.1:7104/0 done calling dispatch on 0x559cd17cd980 2016-03-08 15:19:04.814184 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).reader couldn't read tag, (0) Success 2016-03-08 15:19:04.814224 7f82aea26700 2 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).fault (0) Success 2016-03-08 15:19:04.814254 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).fault on lossy channel, failing 2016-03-08 15:19:04.814257 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=2 pgs=1 cs=1 l=1 c=0x559cd1978480).stop 2016-03-08 15:19:04.814263 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978480).unregister_pipe 2016-03-08 15:19:04.814274 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978480).discard_queue 2016-03-08 15:19:04.814283 7f82aea26700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978480).reader done 2016-03-08 15:19:04.814291 7f82a334e700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978480).writer finishing 2016-03-08 15:19:04.814295 7f82a5352700 10 mon.a@0(leader) e1 ms_handle_reset 0x559cd1978480 127.0.0.1:0/1181108721 2016-03-08 15:19:04.814302 7f82a334e700 10 -- 127.0.0.1:7104/0 queue_reap 0x559cd1a26000 2016-03-08 15:19:04.814303 7f82a5352700 10 mon.a@0(leader) e1 reset/close on session client.? 127.0.0.1:0/1181108721 2016-03-08 15:19:04.814306 7f82a5352700 10 mon.a@0(leader) e1 remove_session 0x559cd19e9600 client.? 
127.0.0.1:0/1181108721 2016-03-08 15:19:04.814307 7f82a334e700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978480).writer done 2016-03-08 15:19:04.814315 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper 2016-03-08 15:19:04.814323 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper reaping pipe 0x559cd1a26000 127.0.0.1:0/1181108721 2016-03-08 15:19:04.814327 7f82a6354700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978480).discard_queue 2016-03-08 15:19:04.814330 7f82a6354700 10 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978480).unregister_pipe - not registered 2016-03-08 15:19:04.814333 7f82a6354700 20 -- 127.0.0.1:7104/0 >> 127.0.0.1:0/1181108721 pipe(0x559cd1a26000 sd=21 :7104 s=4 pgs=1 cs=1 l=1 c=0x559cd1978480).join 2016-03-08 15:19:04.814341 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper reaped pipe 0x559cd1a26000 127.0.0.1:0/1181108721 2016-03-08 15:19:04.814349 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper deleted pipe 0x559cd1a26000 2016-03-08 15:19:04.814351 7f82a6354700 10 -- 127.0.0.1:7104/0 reaper done 2016-03-08 15:19:04.818849 7f82a5b53700 10 mon.a@0(leader).paxosservice(logm 1..14) propose_pending 2016-03-08 15:19:04.818861 7f82a5b53700 10 mon.a@0(leader).log v14 encode_full log v 14 2016-03-08 15:19:04.818899 7f82a5b53700 10 mon.a@0(leader).log v14 encode_pending v15 2016-03-08 15:19:04.818909 7f82a5b53700 5 mon.a@0(leader).paxos(paxos active c 1..29) queue_pending_finisher 0x559cd1494660 2016-03-08 15:19:04.818912 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..29) trigger_propose active, proposing now 2016-03-08 15:19:04.818918 7f82a5b53700 10 mon.a@0(leader).paxos(paxos active c 1..29) propose_pending 30 9384 bytes 2016-03-08 15:19:04.818921 7f82a5b53700 10 mon.a@0(leader).paxos(paxos updating c 1..29) begin for 30 9384 bytes ../qa/workunits/ceph-helpers.sh:1267: display_logs: read file ../qa/workunits/ceph-helpers.sh:1268: display_logs: echo '======================= testdir/osd-crush/client.bootstrap-osd.log' ======================= testdir/osd-crush/client.bootstrap-osd.log ../qa/workunits/ceph-helpers.sh:1269: display_logs: cat testdir/osd-crush/client.bootstrap-osd.log ../qa/workunits/ceph-helpers.sh:1267: display_logs: read file ../qa/workunits/ceph-helpers.sh:1268: display_logs: echo '======================= testdir/osd-crush/osd.0.log' ======================= testdir/osd-crush/osd.0.log ../qa/workunits/ceph-helpers.sh:1269: display_logs: cat testdir/osd-crush/osd.0.log 2016-03-08 15:19:03.649367 7f8b3b3c0940 0 set uid:gid to 0:0 2016-03-08 15:19:03.649503 7f8b3b3c0940 0 ceph version 10.0.4-1828-g754d210 (754d2103e1c504377bebc72430c00f931fa397eb), process ceph-osd, pid 4180 2016-03-08 15:19:03.650964 7f8b3b3c0940 1 filestore(testdir/osd-crush/0) mkfs in testdir/osd-crush/0 2016-03-08 15:19:03.650974 7f8b3b3c0940 1 filestore(testdir/osd-crush/0) mkfs fsid is already set to ad2b7a59-c78c-449e-86c6-e537c3b12550 2016-03-08 15:19:03.650976 7f8b3b3c0940 1 filestore(testdir/osd-crush/0) write_version_stamp 4 2016-03-08 15:19:03.712590 7f8b3b3c0940 0 filestore(testdir/osd-crush/0) backend btrfs (magic 0x9123683e) 2016-03-08 15:19:03.773752 7f8b3b3c0940 1 filestore(testdir/osd-crush/0) leveldb db exists/created 2016-03-08 15:19:03.773892 7f8b3b3c0940 -1 journal FileJournal::_open: disabling aio for non-block journal. 
Use journal_force_aio to force use of aio anyway 2016-03-08 15:19:03.774043 7f8b3b3c0940 1 journal _open testdir/osd-crush/0/journal fd 10: 104857600 bytes, block size 4096 bytes, directio = 1, aio = 0 2016-03-08 15:19:03.786263 7f8b3b3c0940 0 filestore(testdir/osd-crush/0) mkjournal created journal on testdir/osd-crush/0/journal 2016-03-08 15:19:03.799386 7f8b3b3c0940 1 filestore(testdir/osd-crush/0) mkfs done in testdir/osd-crush/0 2016-03-08 15:19:03.799455 7f8b3b3c0940 0 filestore(testdir/osd-crush/0) backend btrfs (magic 0x9123683e) 2016-03-08 15:19:03.799677 7f8b3b3c0940 0 genericfilestorebackend(testdir/osd-crush/0) detect_features: FIEMAP ioctl is disabled via 'filestore fiemap' config option 2016-03-08 15:19:03.799681 7f8b3b3c0940 0 genericfilestorebackend(testdir/osd-crush/0) detect_features: SEEK_DATA/SEEK_HOLE is disabled via 'filestore seek data hole' config option 2016-03-08 15:19:03.799694 7f8b3b3c0940 0 genericfilestorebackend(testdir/osd-crush/0) detect_features: splice is supported 2016-03-08 15:19:03.831942 7f8b3b3c0940 0 genericfilestorebackend(testdir/osd-crush/0) detect_features: syncfs(2) syscall fully supported (by glibc and kernel) 2016-03-08 15:19:03.832081 7f8b3b3c0940 0 btrfsfilestorebackend(testdir/osd-crush/0) detect_feature: CLONE_RANGE ioctl is supported 2016-03-08 15:19:03.854411 7f8b3b3c0940 0 btrfsfilestorebackend(testdir/osd-crush/0) detect_feature: SNAP_CREATE is supported 2016-03-08 15:19:03.854501 7f8b3b3c0940 0 btrfsfilestorebackend(testdir/osd-crush/0) detect_feature: SNAP_DESTROY is supported 2016-03-08 15:19:03.855684 7f8b3b3c0940 0 btrfsfilestorebackend(testdir/osd-crush/0) detect_feature: START_SYNC is supported (transid 105189) 2016-03-08 15:19:03.869456 7f8b3b3c0940 0 btrfsfilestorebackend(testdir/osd-crush/0) detect_feature: WAIT_SYNC is supported 2016-03-08 15:19:03.872115 7f8b3b3c0940 0 btrfsfilestorebackend(testdir/osd-crush/0) detect_feature: SNAP_CREATE_V2 is supported 2016-03-08 15:19:03.918535 7f8b3b3c0940 1 leveldb: Delete type=3 #1 2016-03-08 15:19:03.918621 7f8b3b3c0940 0 filestore(testdir/osd-crush/0) mount: enabling PARALLEL journal mode: fs, checkpoint is enabled 2016-03-08 15:19:03.918681 7f8b3b3c0940 -1 journal FileJournal::_open: disabling aio for non-block journal. Use journal_force_aio to force use of aio anyway 2016-03-08 15:19:03.918682 7f8b3b3c0940 1 journal _open testdir/osd-crush/0/journal fd 15: 104857600 bytes, block size 4096 bytes, directio = 1, aio = 0 2016-03-08 15:19:03.918911 7f8b3b3c0940 1 journal _open testdir/osd-crush/0/journal fd 15: 104857600 bytes, block size 4096 bytes, directio = 1, aio = 0 2016-03-08 15:19:03.919034 7f8b3b3c0940 1 filestore(testdir/osd-crush/0) upgrade 2016-03-08 15:19:03.919062 7f8b3b3c0940 -1 filestore(testdir/osd-crush/0) could not find !-1:7b3f43c4:::osd_superblock:0! 
in index: (2) No such file or directory 2016-03-08 15:19:03.964708 7f8b3b3c0940 1 journal close testdir/osd-crush/0/journal 2016-03-08 15:19:03.978574 7f8b3b3c0940 -1 created object store testdir/osd-crush/0 for osd.0 fsid c4878902-7748-4a77-afab-07655d3b0201 2016-03-08 15:19:03.978601 7f8b3b3c0940 -1 auth: error reading file: testdir/osd-crush/0/keyring: can't open testdir/osd-crush/0/keyring: (2) No such file or directory 2016-03-08 15:19:03.978733 7f8b3b3c0940 -1 created new key in keyring testdir/osd-crush/0/keyring 2016-03-08 15:19:04.191102 7efd579c6940 0 ceph version 10.0.4-1828-g754d210 (754d2103e1c504377bebc72430c00f931fa397eb), process ceph-osd, pid 4256 2016-03-08 15:19:04.191116 7efd579c6940 5 object store type is filestore 2016-03-08 15:19:04.197474 7efd579c6940 10 ErasureCodePluginSelectJerasure: load: jerasure_sse4 2016-03-08 15:19:04.199597 7efd579c6940 10 load: jerasure load: lrc load: isa 2016-03-08 15:19:04.199896 7efd579c6940 2 osd.0 0 mounting testdir/osd-crush/0 testdir/osd-crush/0/journal 2016-03-08 15:19:04.199939 7efd579c6940 0 filestore(testdir/osd-crush/0) backend btrfs (magic 0x9123683e) 2016-03-08 15:19:04.200139 7efd579c6940 0 genericfilestorebackend(testdir/osd-crush/0) detect_features: FIEMAP ioctl is disabled via 'filestore fiemap' config option 2016-03-08 15:19:04.200142 7efd579c6940 0 genericfilestorebackend(testdir/osd-crush/0) detect_features: SEEK_DATA/SEEK_HOLE is disabled via 'filestore seek data hole' config option 2016-03-08 15:19:04.200154 7efd579c6940 0 genericfilestorebackend(testdir/osd-crush/0) detect_features: splice is supported 2016-03-08 15:19:04.230391 7efd579c6940 0 genericfilestorebackend(testdir/osd-crush/0) detect_features: syncfs(2) syscall fully supported (by glibc and kernel) 2016-03-08 15:19:04.230534 7efd579c6940 0 btrfsfilestorebackend(testdir/osd-crush/0) detect_feature: CLONE_RANGE ioctl is supported 2016-03-08 15:19:04.253278 7efd579c6940 0 btrfsfilestorebackend(testdir/osd-crush/0) detect_feature: SNAP_CREATE is supported 2016-03-08 15:19:04.253381 7efd579c6940 0 btrfsfilestorebackend(testdir/osd-crush/0) detect_feature: SNAP_DESTROY is supported 2016-03-08 15:19:04.254534 7efd579c6940 0 btrfsfilestorebackend(testdir/osd-crush/0) detect_feature: START_SYNC is supported (transid 105196) 2016-03-08 15:19:04.265338 7efd579c6940 0 btrfsfilestorebackend(testdir/osd-crush/0) detect_feature: WAIT_SYNC is supported 2016-03-08 15:19:04.267968 7efd579c6940 0 btrfsfilestorebackend(testdir/osd-crush/0) detect_feature: SNAP_CREATE_V2 is supported 2016-03-08 15:19:04.289796 7efd579c6940 1 leveldb: Recovering log #3 2016-03-08 15:19:04.303057 7efd579c6940 1 leveldb: Delete type=0 #3 2016-03-08 15:19:04.303092 7efd579c6940 1 leveldb: Delete type=3 #2 2016-03-08 15:19:04.303191 7efd579c6940 0 filestore(testdir/osd-crush/0) mount: enabling PARALLEL journal mode: fs, checkpoint is enabled 2016-03-08 15:19:04.303294 7efd579c6940 -1 journal FileJournal::_open: disabling aio for non-block journal. 
Use journal_force_aio to force use of aio anyway 2016-03-08 15:19:04.303296 7efd579c6940 1 journal _open testdir/osd-crush/0/journal fd 20: 104857600 bytes, block size 4096 bytes, directio = 1, aio = 0 2016-03-08 15:19:04.303493 7efd579c6940 1 journal _open testdir/osd-crush/0/journal fd 20: 104857600 bytes, block size 4096 bytes, directio = 1, aio = 0 2016-03-08 15:19:04.303615 7efd579c6940 1 filestore(testdir/osd-crush/0) upgrade 2016-03-08 15:19:04.303629 7efd579c6940 2 osd.0 0 boot 2016-03-08 15:19:04.303702 7efd579c6940 10 osd.0 0 read_superblock sb(c4878902-7748-4a77-afab-07655d3b0201 osd.0 ad2b7a59-c78c-449e-86c6-e537c3b12550 e0 [0,0] lci=[0,0]) 2016-03-08 15:19:04.303727 7efd579c6940 10 osd.0 0 init creating/touching snapmapper object 2016-03-08 15:19:04.303899 7efd579c6940 10 open_all_classes 2016-03-08 15:19:04.304109 7efd579c6940 10 open_all_classes found numops 2016-03-08 15:19:04.304115 7efd579c6940 10 _get_class adding new class name numops 0x5643b4ee62c0 2016-03-08 15:19:04.304118 7efd579c6940 10 _load_class numops from .libs/libcls_numops.so 2016-03-08 15:19:04.304257 7efd579c6940 10 register_class numops status 3 2016-03-08 15:19:04.304261 7efd579c6940 10 register_cxx_method numops.add flags 3 0x7efd45823c80 2016-03-08 15:19:04.304263 7efd579c6940 10 register_cxx_method numops.mul flags 3 0x7efd45824710 2016-03-08 15:19:04.304264 7efd579c6940 10 _load_class numops success 2016-03-08 15:19:04.304265 7efd579c6940 10 open_all_classes found rbd 2016-03-08 15:19:04.304266 7efd579c6940 10 _get_class adding new class name rbd 0x5643b4ee6400 2016-03-08 15:19:04.304267 7efd579c6940 10 _load_class rbd from .libs/libcls_rbd.so 2016-03-08 15:19:04.304533 7efd579c6940 10 register_class rbd status 3 2016-03-08 15:19:04.304537 7efd579c6940 10 register_cxx_method rbd.create flags 3 0x7efd45602330 2016-03-08 15:19:04.304538 7efd579c6940 10 register_cxx_method rbd.get_features flags 1 0x7efd45606030 2016-03-08 15:19:04.304540 7efd579c6940 10 register_cxx_method rbd.set_features flags 3 0x7efd4560ada0 2016-03-08 15:19:04.304542 7efd579c6940 10 register_cxx_method rbd.get_size flags 1 0x7efd45605c60 2016-03-08 15:19:04.304544 7efd579c6940 10 register_cxx_method rbd.set_size flags 3 0x7efd45604dc0 2016-03-08 15:19:04.304545 7efd579c6940 10 register_cxx_method rbd.get_snapcontext flags 1 0x7efd4560c4c0 2016-03-08 15:19:04.304547 7efd579c6940 10 register_cxx_method rbd.get_object_prefix flags 1 0x7efd45603ab0 2016-03-08 15:19:04.304548 7efd579c6940 10 register_cxx_method rbd.get_snapshot_name flags 1 0x7efd456033c0 2016-03-08 15:19:04.304550 7efd579c6940 10 register_cxx_method rbd.snapshot_add flags 3 0x7efd45610600 2016-03-08 15:19:04.304551 7efd579c6940 10 register_cxx_method rbd.snapshot_remove flags 3 0x7efd45603610 2016-03-08 15:19:04.304552 7efd579c6940 10 register_cxx_method rbd.snapshot_rename flags 3 0x7efd4560c8e0 2016-03-08 15:19:04.304555 7efd579c6940 10 register_cxx_method rbd.get_all_features flags 1 0x7efd456007e0 2016-03-08 15:19:04.304557 7efd579c6940 10 register_cxx_method rbd.copyup flags 3 0x7efd45600880 2016-03-08 15:19:04.304558 7efd579c6940 10 register_cxx_method rbd.get_parent flags 1 0x7efd45607030 2016-03-08 15:19:04.304559 7efd579c6940 10 register_cxx_method rbd.set_parent flags 3 0x7efd456074b0 2016-03-08 15:19:04.304560 7efd579c6940 10 register_cxx_method rbd.remove_parent flags 3 0x7efd4560b190 2016-03-08 15:19:04.304561 7efd579c6940 10 register_cxx_method rbd.set_protection_status flags 3 0x7efd45606c00 2016-03-08 15:19:04.304562 7efd579c6940 10 register_cxx_method 
rbd.get_protection_status flags 1 0x7efd456030e0 2016-03-08 15:19:04.304562 7efd579c6940 10 register_cxx_method rbd.get_stripe_unit_count flags 1 0x7efd456064e0 2016-03-08 15:19:04.304563 7efd579c6940 10 register_cxx_method rbd.set_stripe_unit_count flags 3 0x7efd45606740 2016-03-08 15:19:04.304565 7efd579c6940 10 register_cxx_method rbd.get_flags flags 1 0x7efd45605910 2016-03-08 15:19:04.304566 7efd579c6940 10 register_cxx_method rbd.set_flags flags 3 0x7efd45605320 2016-03-08 15:19:04.304566 7efd579c6940 10 register_cxx_method rbd.metadata_list flags 1 0x7efd45611c60 2016-03-08 15:19:04.304567 7efd579c6940 10 register_cxx_method rbd.metadata_set flags 3 0x7efd456123d0 2016-03-08 15:19:04.304568 7efd579c6940 10 register_cxx_method rbd.metadata_remove flags 3 0x7efd45601d00 2016-03-08 15:19:04.304568 7efd579c6940 10 register_cxx_method rbd.metadata_get flags 1 0x7efd45608a80 2016-03-08 15:19:04.304569 7efd579c6940 10 register_cxx_method rbd.add_child flags 3 0x7efd4560bfc0 2016-03-08 15:19:04.304570 7efd579c6940 10 register_cxx_method rbd.remove_child flags 3 0x7efd4560e450 2016-03-08 15:19:04.304570 7efd579c6940 10 register_cxx_method rbd.get_children flags 1 0x7efd4560bc50 2016-03-08 15:19:04.304571 7efd579c6940 10 register_cxx_method rbd.get_id flags 1 0x7efd456095c0 2016-03-08 15:19:04.304572 7efd579c6940 10 register_cxx_method rbd.set_id flags 3 0x7efd45602140 2016-03-08 15:19:04.304572 7efd579c6940 10 register_cxx_method rbd.dir_get_id flags 1 0x7efd45603c70 2016-03-08 15:19:04.304573 7efd579c6940 10 register_cxx_method rbd.dir_get_name flags 1 0x7efd45604580 2016-03-08 15:19:04.304574 7efd579c6940 10 register_cxx_method rbd.dir_list flags 1 0x7efd45612ba0 2016-03-08 15:19:04.304574 7efd579c6940 10 register_cxx_method rbd.dir_add_image flags 3 0x7efd45611a60 2016-03-08 15:19:04.304575 7efd579c6940 10 register_cxx_method rbd.dir_remove_image flags 3 0x7efd45604400 2016-03-08 15:19:04.304576 7efd579c6940 10 register_cxx_method rbd.dir_rename_image flags 3 0x7efd45611820 2016-03-08 15:19:04.304577 7efd579c6940 10 register_cxx_method rbd.object_map_load flags 1 0x7efd4560eba0 2016-03-08 15:19:04.304579 7efd579c6940 10 register_cxx_method rbd.object_map_save flags 3 0x7efd4560ef50 2016-03-08 15:19:04.304580 7efd579c6940 10 register_cxx_method rbd.object_map_resize flags 3 0x7efd4560fdf0 2016-03-08 15:19:04.304580 7efd579c6940 10 register_cxx_method rbd.object_map_update flags 3 0x7efd4560f530 2016-03-08 15:19:04.304586 7efd579c6940 10 register_cxx_method rbd.object_map_snap_add flags 3 0x7efd4560ece0 2016-03-08 15:19:04.304587 7efd579c6940 10 register_cxx_method rbd.object_map_snap_remove flags 3 0x7efd4560f1a0 2016-03-08 15:19:04.304588 7efd579c6940 10 register_cxx_method rbd.snap_list flags 1 0x7efd45607c90 2016-03-08 15:19:04.304589 7efd579c6940 10 register_cxx_method rbd.snap_add flags 3 0x7efd45609880 2016-03-08 15:19:04.304589 7efd579c6940 10 register_cxx_method rbd.snap_remove flags 3 0x7efd45609e00 2016-03-08 15:19:04.304590 7efd579c6940 10 register_cxx_method rbd.snap_rename flags 3 0x7efd4560a570 2016-03-08 15:19:04.304591 7efd579c6940 10 register_cxx_method rbd.mirror_mode_get flags 1 0x7efd45607c30 2016-03-08 15:19:04.304591 7efd579c6940 10 register_cxx_method rbd.mirror_mode_set flags 3 0x7efd4560daa0 2016-03-08 15:19:04.304592 7efd579c6940 10 register_cxx_method rbd.mirror_peer_list flags 1 0x7efd4560ddf0 2016-03-08 15:19:04.304593 7efd579c6940 10 register_cxx_method rbd.mirror_peer_add flags 3 0x7efd4560df60 2016-03-08 15:19:04.304594 7efd579c6940 10 register_cxx_method 
rbd.mirror_peer_remove flags 3 0x7efd45601f30 2016-03-08 15:19:04.304595 7efd579c6940 10 register_cxx_method rbd.mirror_peer_set_client flags 3 0x7efd456081d0 2016-03-08 15:19:04.304596 7efd579c6940 10 register_cxx_method rbd.mirror_peer_set_cluster flags 3 0x7efd45608450 2016-03-08 15:19:04.304596 7efd579c6940 10 register_cxx_method rbd.mirror_image_list flags 1 0x7efd45610470 2016-03-08 15:19:04.304597 7efd579c6940 10 register_cxx_method rbd.mirror_image_get flags 1 0x7efd45608900 2016-03-08 15:19:04.304598 7efd579c6940 10 register_cxx_method rbd.mirror_image_set flags 3 0x7efd45609450 2016-03-08 15:19:04.304599 7efd579c6940 10 register_cxx_method rbd.mirror_image_remove flags 3 0x7efd45609030 2016-03-08 15:19:04.304599 7efd579c6940 10 _load_class rbd success 2016-03-08 15:19:04.304601 7efd579c6940 10 open_all_classes found lock 2016-03-08 15:19:04.304602 7efd579c6940 10 _get_class adding new class name lock 0x5643b4ee6540 2016-03-08 15:19:04.304603 7efd579c6940 10 _load_class lock from .libs/libcls_lock.so 2016-03-08 15:19:04.304690 7efd579c6940 10 register_class lock status 3 2016-03-08 15:19:04.304693 7efd579c6940 10 register_cxx_method lock.lock flags 11 0x7efd453f0700 2016-03-08 15:19:04.304694 7efd579c6940 10 register_cxx_method lock.unlock flags 11 0x7efd453f0170 2016-03-08 15:19:04.304695 7efd579c6940 10 register_cxx_method lock.break_lock flags 3 0x7efd453efc00 2016-03-08 15:19:04.304696 7efd579c6940 10 register_cxx_method lock.get_info flags 1 0x7efd453f15c0 2016-03-08 15:19:04.304697 7efd579c6940 10 register_cxx_method lock.list_locks flags 1 0x7efd453ee6e0 2016-03-08 15:19:04.304697 7efd579c6940 10 register_cxx_method lock.assert_locked flags 9 0x7efd453ef030 2016-03-08 15:19:04.304698 7efd579c6940 10 _load_class lock success 2016-03-08 15:19:04.304699 7efd579c6940 10 open_all_classes found version 2016-03-08 15:19:04.304700 7efd579c6940 10 _get_class adding new class name version 0x5643b4ee6680 2016-03-08 15:19:04.304701 7efd579c6940 10 _load_class version from .libs/libcls_version.so 2016-03-08 15:19:04.304772 7efd579c6940 10 register_class version status 3 2016-03-08 15:19:04.304773 7efd579c6940 10 register_cxx_method version.set flags 3 0x7efd451e5d40 2016-03-08 15:19:04.304774 7efd579c6940 10 register_cxx_method version.inc flags 3 0x7efd451e6500 2016-03-08 15:19:04.304775 7efd579c6940 10 register_cxx_method version.inc_conds flags 3 0x7efd451e6500 2016-03-08 15:19:04.304776 7efd579c6940 10 register_cxx_method version.read flags 1 0x7efd451e5a10 2016-03-08 15:19:04.304778 7efd579c6940 10 register_cxx_method version.check_conds flags 1 0x7efd451e6dd0 2016-03-08 15:19:04.304778 7efd579c6940 10 _load_class version success 2016-03-08 15:19:04.304779 7efd579c6940 10 open_all_classes found log 2016-03-08 15:19:04.304780 7efd579c6940 10 _get_class adding new class name log 0x5643b4ee67c0 2016-03-08 15:19:04.304781 7efd579c6940 10 _load_class log from .libs/libcls_log.so 2016-03-08 15:19:04.304852 7efd579c6940 10 register_class log status 3 2016-03-08 15:19:04.304852 7efd579c6940 10 register_cxx_method log.add flags 3 0x7efd44fdc480 2016-03-08 15:19:04.304854 7efd579c6940 10 register_cxx_method log.list flags 1 0x7efd44fdd620 2016-03-08 15:19:04.304854 7efd579c6940 10 register_cxx_method log.trim flags 3 0x7efd44fdcdd0 2016-03-08 15:19:04.304855 7efd579c6940 10 register_cxx_method log.info flags 1 0x7efd44fdbe20 2016-03-08 15:19:04.304858 7efd579c6940 10 _load_class log success 2016-03-08 15:19:04.304859 7efd579c6940 10 open_all_classes found statelog 2016-03-08 15:19:04.304860 
7efd579c6940 10 _get_class adding new class name statelog 0x5643b4ee6900 2016-03-08 15:19:04.304860 7efd579c6940 10 _load_class statelog from .libs/libcls_statelog.so 2016-03-08 15:19:04.304927 7efd579c6940 10 register_class statelog status 3 2016-03-08 15:19:04.304928 7efd579c6940 10 register_cxx_method statelog.add flags 3 0x7efd44dd37b0 2016-03-08 15:19:04.304929 7efd579c6940 10 register_cxx_method statelog.list flags 1 0x7efd44dd4780 2016-03-08 15:19:04.304930 7efd579c6940 10 register_cxx_method statelog.remove flags 3 0x7efd44dd2590 2016-03-08 15:19:04.304931 7efd579c6940 10 register_cxx_method statelog.check_state flags 1 0x7efd44dd2e50 2016-03-08 15:19:04.304931 7efd579c6940 10 _load_class statelog success 2016-03-08 15:19:04.304933 7efd579c6940 10 open_all_classes found timeindex 2016-03-08 15:19:04.304934 7efd579c6940 10 _get_class adding new class name timeindex 0x5643b4ee6a40 2016-03-08 15:19:04.304934 7efd579c6940 10 _load_class timeindex from .libs/libcls_timeindex.so 2016-03-08 15:19:04.305012 7efd579c6940 10 register_class timeindex status 3 2016-03-08 15:19:04.305013 7efd579c6940 10 register_cxx_method timeindex.add flags 3 0x7efd44bc99a0 2016-03-08 15:19:04.305014 7efd579c6940 10 register_cxx_method timeindex.list flags 1 0x7efd44bca3d0 2016-03-08 15:19:04.305014 7efd579c6940 10 register_cxx_method timeindex.trim flags 3 0x7efd44bc9b60 2016-03-08 15:19:04.305015 7efd579c6940 10 _load_class timeindex success 2016-03-08 15:19:04.305016 7efd579c6940 10 open_all_classes found replica_log 2016-03-08 15:19:04.305018 7efd579c6940 10 _get_class adding new class name replica_log 0x5643b4ee6b80 2016-03-08 15:19:04.305019 7efd579c6940 10 _load_class replica_log from .libs/libcls_replica_log.so 2016-03-08 15:19:04.305087 7efd579c6940 10 register_class replica_log status 3 2016-03-08 15:19:04.305088 7efd579c6940 10 register_cxx_method replica_log.set flags 3 0x7efd449c3030 2016-03-08 15:19:04.305089 7efd579c6940 10 register_cxx_method replica_log.get flags 1 0x7efd449c12a0 2016-03-08 15:19:04.305089 7efd579c6940 10 register_cxx_method replica_log.delete flags 3 0x7efd449c28c0 2016-03-08 15:19:04.305090 7efd579c6940 10 _load_class replica_log success 2016-03-08 15:19:04.305091 7efd579c6940 10 open_all_classes found user 2016-03-08 15:19:04.305092 7efd579c6940 10 _get_class adding new class name user 0x5643b4ee6cc0 2016-03-08 15:19:04.305092 7efd579c6940 10 _load_class user from .libs/libcls_user.so 2016-03-08 15:19:04.305160 7efd579c6940 10 register_class user status 3 2016-03-08 15:19:04.305162 7efd579c6940 10 register_cxx_method user.set_buckets_info flags 3 0x7efd447b6910 2016-03-08 15:19:04.305163 7efd579c6940 10 register_cxx_method user.complete_stats_sync flags 3 0x7efd447b6140 2016-03-08 15:19:04.305164 7efd579c6940 10 register_cxx_method user.remove_bucket flags 3 0x7efd447b52d0 2016-03-08 15:19:04.305164 7efd579c6940 10 register_cxx_method user.list_buckets flags 1 0x7efd447b7880 2016-03-08 15:19:04.305166 7efd579c6940 10 register_cxx_method user.get_header flags 1 0x7efd447b3e80 2016-03-08 15:19:04.305167 7efd579c6940 10 _load_class user success 2016-03-08 15:19:04.305168 7efd579c6940 10 open_all_classes found cephfs 2016-03-08 15:19:04.305169 7efd579c6940 10 _get_class adding new class name cephfs 0x5643b4ee6e00 2016-03-08 15:19:04.305171 7efd579c6940 10 _load_class cephfs from .libs/libcls_cephfs.so 2016-03-08 15:19:04.305243 7efd579c6940 0 cls/cephfs/cls_cephfs.cc:202: loading cephfs_size_scan 2016-03-08 15:19:04.305245 7efd579c6940 10 register_class cephfs status 3 
2016-03-08 15:19:04.305246 7efd579c6940 10 register_cxx_method cephfs.accumulate_inode_metadata flags 3 0x7efd445ad770 2016-03-08 15:19:04.305247 7efd579c6940 10 _load_class cephfs success 2016-03-08 15:19:04.305248 7efd579c6940 10 open_all_classes found journal 2016-03-08 15:19:04.305249 7efd579c6940 10 _get_class adding new class name journal 0x5643b4ee6f40 2016-03-08 15:19:04.305250 7efd579c6940 10 _load_class journal from .libs/libcls_journal.so 2016-03-08 15:19:04.305359 7efd579c6940 10 register_class journal status 3 2016-03-08 15:19:04.305361 7efd579c6940 10 register_cxx_method journal.create flags 3 0x7efd4439f090 2016-03-08 15:19:04.305362 7efd579c6940 10 register_cxx_method journal.get_order flags 1 0x7efd4439d800 2016-03-08 15:19:04.305363 7efd579c6940 10 register_cxx_method journal.get_splay_width flags 1 0x7efd4439d7a0 2016-03-08 15:19:04.305363 7efd579c6940 10 register_cxx_method journal.get_pool_id flags 1 0x7efd4439cf10 2016-03-08 15:19:04.305364 7efd579c6940 10 register_cxx_method journal.get_minimum_set flags 1 0x7efd4439dde0 2016-03-08 15:19:04.305370 7efd579c6940 10 register_cxx_method journal.set_minimum_set flags 3 0x7efd4439dc60 2016-03-08 15:19:04.305371 7efd579c6940 10 register_cxx_method journal.get_active_set flags 1 0x7efd4439dbf0 2016-03-08 15:19:04.305372 7efd579c6940 10 register_cxx_method journal.set_active_set flags 3 0x7efd4439da70 2016-03-08 15:19:04.305373 7efd579c6940 10 register_cxx_method journal.get_client flags 1 0x7efd4439e190 2016-03-08 15:19:04.305374 7efd579c6940 10 register_cxx_method journal.client_register flags 3 0x7efd4439e470 2016-03-08 15:19:04.305375 7efd579c6940 10 register_cxx_method journal.client_update flags 3 0x7efd4439f450 2016-03-08 15:19:04.305376 7efd579c6940 10 register_cxx_method journal.client_unregister flags 3 0x7efd443a1da0 2016-03-08 15:19:04.305377 7efd579c6940 10 register_cxx_method journal.client_commit flags 3 0x7efd4439ec20 2016-03-08 15:19:04.305379 7efd579c6940 10 register_cxx_method journal.client_list flags 1 0x7efd4439f910 2016-03-08 15:19:04.305380 7efd579c6940 10 register_cxx_method journal.get_next_tag_tid flags 1 0x7efd4439da00 2016-03-08 15:19:04.305381 7efd579c6940 10 register_cxx_method journal.get_tag flags 1 0x7efd4439de50 2016-03-08 15:19:04.305383 7efd579c6940 10 register_cxx_method journal.tag_create flags 3 0x7efd443a2050 2016-03-08 15:19:04.305383 7efd579c6940 10 register_cxx_method journal.tag_list flags 1 0x7efd443a00f0 2016-03-08 15:19:04.305384 7efd579c6940 10 register_cxx_method journal.guard_append flags 3 0x7efd4439c4f0 2016-03-08 15:19:04.305385 7efd579c6940 10 _load_class journal success 2016-03-08 15:19:04.305395 7efd579c6940 10 open_all_classes found hello 2016-03-08 15:19:04.305396 7efd579c6940 10 _get_class adding new class name hello 0x5643b4ee7080 2016-03-08 15:19:04.305397 7efd579c6940 10 _load_class hello from .libs/libcls_hello.so 2016-03-08 15:19:04.305498 7efd579c6940 0 cls/hello/cls_hello.cc:305: loading cls_hello 2016-03-08 15:19:04.305503 7efd579c6940 10 register_class hello status 3 2016-03-08 15:19:04.305504 7efd579c6940 10 register_cxx_method hello.say_hello flags 1 0x7efd44193800 2016-03-08 15:19:04.305512 7efd579c6940 10 register_cxx_method hello.record_hello flags 10 0x7efd44194670 2016-03-08 15:19:04.305514 7efd579c6940 10 register_cxx_method hello.writes_dont_return_data flags 2 0x7efd44193b20 2016-03-08 15:19:04.305516 7efd579c6940 10 register_cxx_method hello.replay flags 1 0x7efd44193700 2016-03-08 15:19:04.305517 7efd579c6940 10 register_cxx_method 
hello.turn_it_to_11 flags 11 0x7efd44193d90 2016-03-08 15:19:04.305518 7efd579c6940 10 register_cxx_method hello.bad_reader flags 2 0x7efd441936f0 2016-03-08 15:19:04.305519 7efd579c6940 10 register_cxx_method hello.bad_writer flags 1 0x7efd441936e0 2016-03-08 15:19:04.305521 7efd579c6940 10 _load_class hello success 2016-03-08 15:19:04.305522 7efd579c6940 10 open_all_classes found refcount 2016-03-08 15:19:04.305524 7efd579c6940 10 _get_class adding new class name refcount 0x5643b4ee71c0 2016-03-08 15:19:04.305526 7efd579c6940 10 _load_class refcount from .libs/libcls_refcount.so 2016-03-08 15:19:04.307246 7efd579c6940 10 register_class refcount status 3 2016-03-08 15:19:04.307248 7efd579c6940 10 register_cxx_method refcount.get flags 3 0x7efd43efd7e0 2016-03-08 15:19:04.307249 7efd579c6940 10 register_cxx_method refcount.put flags 3 0x7efd43efc240 2016-03-08 15:19:04.307250 7efd579c6940 10 register_cxx_method refcount.set flags 3 0x7efd43efcff0 2016-03-08 15:19:04.307251 7efd579c6940 10 register_cxx_method refcount.read flags 1 0x7efd43efc870 2016-03-08 15:19:04.307252 7efd579c6940 10 _load_class refcount success 2016-03-08 15:19:04.307253 7efd579c6940 10 open_all_classes found rgw 2016-03-08 15:19:04.307255 7efd579c6940 10 _get_class adding new class name rgw 0x5643b4ee7300 2016-03-08 15:19:04.307263 7efd579c6940 10 _load_class rgw from .libs/libcls_rgw.so 2016-03-08 15:19:04.308669 7efd579c6940 10 register_class rgw status 3 2016-03-08 15:19:04.308670 7efd579c6940 10 register_cxx_method rgw.bucket_init_index flags 3 0x7efd43b83fd0 2016-03-08 15:19:04.308672 7efd579c6940 10 register_cxx_method rgw.bucket_set_tag_timeout flags 3 0x7efd43b8d3c0 2016-03-08 15:19:04.308683 7efd579c6940 10 register_cxx_method rgw.bucket_list flags 1 0x7efd43b8d8e0 2016-03-08 15:19:04.308684 7efd579c6940 10 register_cxx_method rgw.bucket_check_index flags 1 0x7efd43b90040 2016-03-08 15:19:04.308684 7efd579c6940 10 register_cxx_method rgw.bucket_rebuild_index flags 3 0x7efd43b8fe90 2016-03-08 15:19:04.308685 7efd579c6940 10 register_cxx_method rgw.bucket_prepare_op flags 3 0x7efd43b8f200 2016-03-08 15:19:04.308686 7efd579c6940 10 register_cxx_method rgw.bucket_complete_op flags 3 0x7efd43b903c0 2016-03-08 15:19:04.308687 7efd579c6940 10 register_cxx_method rgw.bucket_link_olh flags 3 0x7efd43b946f0 2016-03-08 15:19:04.308688 7efd579c6940 10 register_cxx_method rgw.bucket_unlink_instance flags 3 0x7efd43b93b10 2016-03-08 15:19:04.308689 7efd579c6940 10 register_cxx_method rgw.bucket_read_olh_log flags 1 0x7efd43b95ab0 2016-03-08 15:19:04.308690 7efd579c6940 10 register_cxx_method rgw.bucket_trim_olh_log flags 3 0x7efd43b930f0 2016-03-08 15:19:04.308690 7efd579c6940 10 register_cxx_method rgw.bucket_clear_olh flags 3 0x7efd43b92840 2016-03-08 15:19:04.308691 7efd579c6940 10 register_cxx_method rgw.obj_remove flags 3 0x7efd43b8c1b0 2016-03-08 15:19:04.308692 7efd579c6940 10 register_cxx_method rgw.obj_store_pg_ver flags 2 0x7efd43b82320 2016-03-08 15:19:04.308693 7efd579c6940 10 register_cxx_method rgw.obj_check_attrs_prefix flags 1 0x7efd43b842e0 2016-03-08 15:19:04.308693 7efd579c6940 10 register_cxx_method rgw.obj_check_mtime flags 1 0x7efd43b7f720 2016-03-08 15:19:04.308694 7efd579c6940 10 register_cxx_method rgw.bi_get flags 1 0x7efd43b80240 2016-03-08 15:19:04.308695 7efd579c6940 10 register_cxx_method rgw.bi_put flags 3 0x7efd43b819b0 2016-03-08 15:19:04.308696 7efd579c6940 10 register_cxx_method rgw.bi_list flags 1 0x7efd43b89d20 2016-03-08 15:19:04.308697 7efd579c6940 10 register_cxx_method 
rgw.bi_log_list flags 1 0x7efd43b86310 2016-03-08 15:19:04.308698 7efd579c6940 10 register_cxx_method rgw.bi_log_trim flags 3 0x7efd43b86aa0 2016-03-08 15:19:04.308698 7efd579c6940 10 register_cxx_method rgw.dir_suggest_changes flags 3 0x7efd43b91500 2016-03-08 15:19:04.308699 7efd579c6940 10 register_cxx_method rgw.user_usage_log_add flags 3 0x7efd43b88bf0 2016-03-08 15:19:04.308700 7efd579c6940 10 register_cxx_method rgw.user_usage_log_read flags 1 0x7efd43b87fb0 2016-03-08 15:19:04.308700 7efd579c6940 10 register_cxx_method rgw.user_usage_log_trim flags 3 0x7efd43b88a40 2016-03-08 15:19:04.308701 7efd579c6940 10 register_cxx_method rgw.gc_set_entry flags 3 0x7efd43b83370 2016-03-08 15:19:04.308702 7efd579c6940 10 register_cxx_method rgw.gc_defer_entry flags 3 0x7efd43b82e10 2016-03-08 15:19:04.308703 7efd579c6940 10 register_cxx_method rgw.gc_list flags 1 0x7efd43b84a80 2016-03-08 15:19:04.308703 7efd579c6940 10 register_cxx_method rgw.gc_remove flags 3 0x7efd43b83810 2016-03-08 15:19:04.308704 7efd579c6940 10 _load_class rgw success 2016-03-08 15:19:04.308706 7efd579c6940 10 open_all_classes found kvs 2016-03-08 15:19:04.308707 7efd579c6940 10 _get_class adding new class name kvs 0x5643b4ee7440 2016-03-08 15:19:04.308708 7efd579c6940 10 _load_class kvs from .libs/libcls_kvs.so 2016-03-08 15:19:04.308811 7efd579c6940 10 register_class kvs status 3 2016-03-08 15:19:04.308814 7efd579c6940 10 register_cxx_method kvs.get_idata_from_key flags 1 0x7efd438bf8e0 2016-03-08 15:19:04.308815 7efd579c6940 10 register_cxx_method kvs.get_next_idata flags 1 0x7efd438c0c70 2016-03-08 15:19:04.308815 7efd579c6940 10 register_cxx_method kvs.get_prev_idata flags 1 0x7efd438be880 2016-03-08 15:19:04.308816 7efd579c6940 10 register_cxx_method kvs.read_many flags 1 0x7efd438b82f0 2016-03-08 15:19:04.308816 7efd579c6940 10 register_cxx_method kvs.check_writable flags 3 0x7efd438b7c70 2016-03-08 15:19:04.308817 7efd579c6940 10 register_cxx_method kvs.assert_size_in_bound flags 2 0x7efd438b7ef0 2016-03-08 15:19:04.308828 7efd579c6940 10 register_cxx_method kvs.omap_insert flags 2 0x7efd438bd3d0 2016-03-08 15:19:04.308828 7efd579c6940 10 register_cxx_method kvs.create_with_omap flags 2 0x7efd438bc590 2016-03-08 15:19:04.308834 7efd579c6940 10 register_cxx_method kvs.omap_remove flags 2 0x7efd438b9220 2016-03-08 15:19:04.308835 7efd579c6940 10 register_cxx_method kvs.maybe_read_for_balance flags 1 0x7efd438ba4d0 2016-03-08 15:19:04.308835 7efd579c6940 10 _load_class kvs success 2016-03-08 15:19:04.308864 7efd579c6940 20 osd.0 0 get_map 0 - return initial 0x5643b4eda6c0 2016-03-08 15:19:04.308869 7efd579c6940 0 osd.0 0 crush map has features 33816576, adjusting msgr requires for clients 2016-03-08 15:19:04.308870 7efd579c6940 0 osd.0 0 crush map has features 33816576 was 8705, adjusting msgr requires for mons 2016-03-08 15:19:04.308872 7efd579c6940 0 osd.0 0 crush map has features 33816576, adjusting msgr requires for osds 2016-03-08 15:19:04.308873 7efd579c6940 10 osd.0 0 create_recoverystate_perf 2016-03-08 15:19:04.308879 7efd579c6940 10 osd.0 0 clear_temp_objects 2016-03-08 15:19:04.308892 7efd579c6940 0 osd.0 0 load_pgs 2016-03-08 15:19:04.308907 7efd579c6940 10 osd.0 0 load_pgs ignoring unrecognized meta 2016-03-08 15:19:04.308908 7efd579c6940 0 osd.0 0 load_pgs opened 0 pgs 2016-03-08 15:19:04.308909 7efd579c6940 10 osd.0 0 build_past_intervals_parallel nothing to build 2016-03-08 15:19:04.308910 7efd579c6940 2 osd.0 0 superblock: i am osd.0 2016-03-08 15:19:04.308910 7efd579c6940 0 osd.0 0 using 0 op queue 
with priority op cut off at 64. 2016-03-08 15:19:04.308911 7efd579c6940 10 osd.0 0 create_logger 2016-03-08 15:19:04.309159 7efd579c6940 -1 osd.0 0 log_to_monitors {default=true} 2016-03-08 15:19:04.309313 7efd579c6940 10 osd.0 0 set_disk_tp_priority class priority -1 2016-03-08 15:19:04.309366 7efd33092700 20 osd.0 0 update_osd_stat osd_stat(56456 MB used, 244 GB avail, 300 GB total, peers []/[] op hist []) 2016-03-08 15:19:04.309375 7efd33092700 5 osd.0 0 heartbeat: osd_stat(56456 MB used, 244 GB avail, 300 GB total, peers []/[] op hist []) 2016-03-08 15:19:04.309374 7efd3088d700 10 osd.0 0 agent_entry start 2016-03-08 15:19:04.309377 7efd3088d700 20 osd.0 0 agent_entry empty queue 2016-03-08 15:19:04.309593 7efd579b1700 10 osd.0 0 new session (outgoing) 0x5643b4e51800 con=0x5643b4e5a600 addr=127.0.0.1:6800/4256 2016-03-08 15:19:04.309615 7efd579b1700 10 osd.0 0 OSD::ms_get_authorizer type=mon 2016-03-08 15:19:04.309687 7efd438b3700 10 osd.0 0 ms_handle_connect on mon 2016-03-08 15:19:04.309693 7efd438b3700 10 osd.0 0 requeue_pg_temp 0 + 0 -> 0 2016-03-08 15:19:04.309695 7efd438b3700 10 osd.0 0 requeue_failures 0 + 0 -> 0 2016-03-08 15:19:04.309697 7efd438b3700 20 osd.0 0 send_pg_stats 2016-03-08 15:19:04.310070 7efd579c6940 10 osd.0 0 ensuring pgs have consumed prior maps 2016-03-08 15:19:04.310073 7efd579c6940 7 osd.0 0 consume_map version 0 2016-03-08 15:19:04.310078 7efd579c6940 0 osd.0 0 done with init, starting boot process 2016-03-08 15:19:04.310091 7efd579c6940 1 osd.0 0 We are healthy, booting 2016-03-08 15:19:04.310092 7efd579c6940 10 osd.0 0 start_boot - have maps 0..0 2016-03-08 15:19:04.310276 7efd438b3700 10 osd.0 0 do_waiters -- start 2016-03-08 15:19:04.310279 7efd438b3700 10 osd.0 0 do_waiters -- finish 2016-03-08 15:19:04.310280 7efd438b3700 20 osd.0 0 _dispatch 0x5643b4eec800 osd_map(4..4 src has 1..4) v3 2016-03-08 15:19:04.310315 7efd438b3700 3 osd.0 0 handle_osd_map epochs [4,4], i have 0, src has [1,4] 2016-03-08 15:19:04.310318 7efd438b3700 10 osd.0 0 handle_osd_map message skips epochs 1..3 2016-03-08 15:19:04.310327 7efd438b3700 10 osd.0 0 do_waiters -- start 2016-03-08 15:19:04.310328 7efd438b3700 10 osd.0 0 do_waiters -- finish 2016-03-08 15:19:04.310415 7efd3b0a2700 10 osd.0 0 _preboot _preboot mon has osdmaps 1..4 2016-03-08 15:19:04.310421 7efd3b0a2700 10 osd.0 0 _send_boot 2016-03-08 15:19:04.310425 7efd3b0a2700 10 osd.0 0 new session (outgoing) 0x5643b4e51a00 con=0x5643b4e5a780 addr=127.0.0.1:6801/4256 2016-03-08 15:19:04.310433 7efd3b0a2700 10 osd.0 0 assuming cluster_addr ip matches client_addr 2016-03-08 15:19:04.310435 7efd3b0a2700 10 osd.0 0 assuming hb_back_addr ip matches cluster_addr 2016-03-08 15:19:04.310436 7efd3b0a2700 10 osd.0 0 assuming hb_front_addr ip matches client_addr 2016-03-08 15:19:04.310442 7efd3b0a2700 10 osd.0 0 client_addr 127.0.0.1:6800/4256, cluster_addr 127.0.0.1:6801/4256, hb_back_addr 127.0.0.1:6802/4256, hb_front_addr 127.0.0.1:6803/4256 2016-03-08 15:19:04.449054 7efd3b0a2700 10 osd.0 0 _collect_metadata {arch=x86_64,back_addr=127.0.0.1:6801/4256,backend_filestore_dev_node=unknown,backend_filestore_partition_path=unknown,ceph_version=ceph version 10.0.4-1828-g754d210 (754d2103e1c504377bebc72430c00f931fa397eb),cpu=Intel(R) Core(TM) i7-4810MQ CPU @ 2.80GHz,distro=Fedora,distro_codename=TwentyThree,distro_description=Fedora release 23 (Twenty 
Three),distro_version=23,filestore_backend=btrfs,filestore_f_type=0x9123683e,front_addr=127.0.0.1:6800/4256,hb_back_addr=127.0.0.1:6802/4256,hb_front_addr=127.0.0.1:6803/4256,hostname=R1,kernel_description=#1 SMP Fri Feb 26 18:45:40 UTC 2016,kernel_version=4.4.3-300.fc23.x86_64,mem_swap_kb=0,mem_total_kb=32568744,os=Linux,osd_data=testdir/osd-crush/0,osd_journal=testdir/osd-crush/0/journal,osd_objectstore=filestore} 2016-03-08 15:19:04.449116 7efd438b3700 10 osd.0 0 do_waiters -- start 2016-03-08 15:19:04.449124 7efd438b3700 10 osd.0 0 do_waiters -- finish 2016-03-08 15:19:04.449127 7efd438b3700 20 osd.0 0 _dispatch 0x5643b4eeca80 osd_map(1..4 src has 1..4) v3 2016-03-08 15:19:04.449146 7efd438b3700 3 osd.0 0 handle_osd_map epochs [1,4], i have 0, src has [1,4] 2016-03-08 15:19:04.449151 7efd438b3700 10 osd.0 0 handle_osd_map got inc map for epoch 1 2016-03-08 15:19:04.449235 7efd438b3700 20 osd.0 0 got_full_map 1, nothing requested 2016-03-08 15:19:04.449253 7efd438b3700 10 osd.0 0 handle_osd_map got inc map for epoch 2 2016-03-08 15:19:04.449277 7efd438b3700 20 osd.0 0 got_full_map 2, nothing requested 2016-03-08 15:19:04.449299 7efd438b3700 10 osd.0 0 handle_osd_map got inc map for epoch 3 2016-03-08 15:19:04.449323 7efd438b3700 20 osd.0 0 got_full_map 3, nothing requested 2016-03-08 15:19:04.449333 7efd438b3700 10 osd.0 0 handle_osd_map got inc map for epoch 4 2016-03-08 15:19:04.449356 7efd438b3700 20 osd.0 0 got_full_map 4, nothing requested 2016-03-08 15:19:04.449371 7efd438b3700 10 osd.0 0 advance to epoch 1 (<= newest 4) 2016-03-08 15:19:04.449375 7efd438b3700 7 osd.0 1 advance_map epoch 1 2016-03-08 15:19:04.449376 7efd438b3700 10 osd.0 1 advance to epoch 2 (<= newest 4) 2016-03-08 15:19:04.449377 7efd438b3700 7 osd.0 2 advance_map epoch 2 2016-03-08 15:19:04.449378 7efd438b3700 10 osd.0 2 advance to epoch 3 (<= newest 4) 2016-03-08 15:19:04.449379 7efd438b3700 7 osd.0 3 advance_map epoch 3 2016-03-08 15:19:04.449379 7efd438b3700 10 osd.0 3 advance to epoch 4 (<= newest 4) 2016-03-08 15:19:04.449380 7efd438b3700 7 osd.0 4 advance_map epoch 4 2016-03-08 15:19:04.449381 7efd438b3700 10 osd.0 4 write_superblock sb(c4878902-7748-4a77-afab-07655d3b0201 osd.0 ad2b7a59-c78c-449e-86c6-e537c3b12550 e4 [1,4] lci=[0,0]) 2016-03-08 15:19:04.449440 7efd438b3700 0 osd.0 4 crush map has features 1107558400, adjusting msgr requires for clients 2016-03-08 15:19:04.449445 7efd438b3700 0 osd.0 4 crush map has features 1107558400 was 33825281, adjusting msgr requires for mons 2016-03-08 15:19:04.449448 7efd438b3700 0 osd.0 4 crush map has features 1107558400, adjusting msgr requires for osds 2016-03-08 15:19:04.449449 7efd438b3700 7 osd.0 4 consume_map version 4 2016-03-08 15:19:04.449453 7efd438b3700 10 osd.0 4 not yet active; waiting for peering wq to drain 2016-03-08 15:19:04.449456 7efd438b3700 10 osd.0 4 do_waiters -- start 2016-03-08 15:19:04.449457 7efd438b3700 10 osd.0 4 do_waiters -- finish 2016-03-08 15:19:04.585400 7efd438b3700 10 osd.0 4 do_waiters -- start 2016-03-08 15:19:04.585403 7efd438b3700 10 osd.0 4 do_waiters -- finish 2016-03-08 15:19:04.585404 7efd438b3700 20 osd.0 4 _dispatch 0x5643b4eec580 osd_map(5..6 src has 1..6) v3 2016-03-08 15:19:04.585453 7efd438b3700 3 osd.0 4 handle_osd_map epochs [5,6], i have 4, src has [1,6] 2016-03-08 15:19:04.585457 7efd438b3700 10 osd.0 4 handle_osd_map got inc map for epoch 5 2016-03-08 15:19:04.585508 7efd438b3700 20 osd.0 4 got_full_map 5, nothing requested 2016-03-08 15:19:04.585529 7efd438b3700 10 osd.0 4 handle_osd_map got inc map for 
epoch 6 2016-03-08 15:19:04.585572 7efd438b3700 20 osd.0 4 got_full_map 6, nothing requested 2016-03-08 15:19:04.585595 7efd438b3700 10 osd.0 4 advance to epoch 5 (<= newest 6) 2016-03-08 15:19:04.585599 7efd438b3700 7 osd.0 5 advance_map epoch 5 2016-03-08 15:19:04.585600 7efd438b3700 10 osd.0 5 advance to epoch 6 (<= newest 6) 2016-03-08 15:19:04.585602 7efd438b3700 7 osd.0 6 advance_map epoch 6 2016-03-08 15:19:04.585603 7efd438b3700 10 osd.0 6 up_epoch is 6 2016-03-08 15:19:04.585604 7efd438b3700 10 osd.0 6 boot_epoch is 6 2016-03-08 15:19:04.585605 7efd438b3700 1 osd.0 6 state: booting -> active 2016-03-08 15:19:04.585608 7efd438b3700 10 osd.0 6 write_superblock sb(c4878902-7748-4a77-afab-07655d3b0201 osd.0 ad2b7a59-c78c-449e-86c6-e537c3b12550 e6 [1,6] lci=[6,6]) 2016-03-08 15:19:04.585658 7efd438b3700 7 osd.0 6 consume_map version 6 2016-03-08 15:19:04.585664 7efd438b3700 10 osd.0 6 maybe_update_heartbeat_peers updating 2016-03-08 15:19:04.585665 7efd438b3700 10 osd.0 6 maybe_update_heartbeat_peers 0 peers, extras 2016-03-08 15:19:04.585667 7efd438b3700 7 osd.0 6 activate_map version 6 2016-03-08 15:19:04.585674 7efd438b3700 10 osd.0 6 do_waiters -- start 2016-03-08 15:19:04.585675 7efd438b3700 10 osd.0 6 do_waiters -- finish 2016-03-08 15:19:04.585685 7efd438b3700 10 osd.0 6 do_waiters -- start 2016-03-08 15:19:04.585688 7efd438b3700 10 osd.0 6 do_waiters -- finish 2016-03-08 15:19:04.585689 7efd438b3700 20 osd.0 6 _dispatch 0x5643b4eec080 osd_map(5..6 src has 1..6) v3 2016-03-08 15:19:04.585694 7efd438b3700 3 osd.0 6 handle_osd_map epochs [5,6], i have 6, src has [1,6] 2016-03-08 15:19:04.585696 7efd438b3700 10 osd.0 6 no new maps here, dropping 2016-03-08 15:19:04.585697 7efd438b3700 10 osd.0 6 do_waiters -- start 2016-03-08 15:19:04.585698 7efd438b3700 10 osd.0 6 do_waiters -- finish 2016-03-08 15:19:04.585706 7efd3088d700 20 osd.0 6 agent_entry empty queue 2016-03-08 15:19:04.605826 7efd438b3700 10 osd.0 6 do_waiters -- start 2016-03-08 15:19:04.605829 7efd438b3700 10 osd.0 6 do_waiters -- finish 2016-03-08 15:19:04.605830 7efd438b3700 20 osd.0 6 _dispatch 0x5643b4edd180 osd_pg_create(e6 1.0:3 1.1:3 1.2:3 1.3:3) v3 2016-03-08 15:19:04.605854 7efd438b3700 10 osd.0 6 handle_pg_create osd_pg_create(e6 1.0:3 1.1:3 1.2:3 1.3:3) v3 2016-03-08 15:19:04.605857 7efd438b3700 15 osd.0 6 require_same_or_newer_map 6 (i am 6) 0x5643b4edd180 2016-03-08 15:19:04.605865 7efd438b3700 20 osd.0 6 mkpg 1.0 e3@2016-03-08 15:19:02.851199 2016-03-08 15:19:04.605892 7efd438b3700 15 osd.0 6 project_pg_history 1.0 from 3 to 6, start ec=3 les/c/f 0/0/0 0/0/0 2016-03-08 15:19:04.605914 7efd438b3700 15 osd.0 6 project_pg_history 1.0 acting|up changed in 6 from []/[] -1/-1 -> [0]/[0] 0/0 2016-03-08 15:19:04.605919 7efd438b3700 15 osd.0 6 project_pg_history 1.0 up changed in 6 from [] -1 -> [0] 0 2016-03-08 15:19:04.605921 7efd438b3700 15 osd.0 6 project_pg_history 1.0 primary changed in 6 2016-03-08 15:19:04.605921 7efd438b3700 15 osd.0 6 project_pg_history end ec=3 les/c/f 0/0/0 6/6/6 2016-03-08 15:19:04.605942 7efd438b3700 20 osd.0 6 _create_lock_pg pgid 1.0 2016-03-08 15:19:04.605944 7efd438b3700 10 osd.0 6 _open_lock_pg 1.0 2016-03-08 15:19:04.605948 7efd438b3700 10 osd.0 6 _get_pool 1 2016-03-08 15:19:04.605996 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.0(unlocked)] enter Initial 2016-03-08 15:19:04.606023 7efd438b3700 20 osd.0 pg_epoch: 6 pg[1.0(unlocked)] enter NotTrimming 2016-03-08 15:19:04.606031 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.0( DNE empty local-les=0 n=0 ec=0 les/c/f 0/0/0 0/0/0) [] r=0 lpr=0 
crt=0'0 inactive NIBBLEWISE] init role 0 up [0] acting [0] history ec=3 les/c/f 0/0/0 6/6/6 0 past_intervals 2016-03-08 15:19:04.606053 7efd438b3700 5 write_log with: dirty_to: 0'0, dirty_from: 4294967295'18446744073709551615, dirty_divergent_priors: false, divergent_priors: 0, writeout_from: 4294967295'18446744073709551615, trimmed: 2016-03-08 15:19:04.606059 7efd438b3700 7 osd.0 6 _create_lock_pg pg[1.0( empty local-les=0 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=0 crt=0'0 mlcod 0'0 inactive] 2016-03-08 15:19:04.606063 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=0 crt=0'0 mlcod 0'0 inactive] handle_create 2016-03-08 15:19:04.606067 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=0 crt=0'0 mlcod 0'0 inactive] exit Initial 0.000071 0 0.000000 2016-03-08 15:19:04.606072 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=0 crt=0'0 mlcod 0'0 inactive] enter Reset 2016-03-08 15:19:04.606076 7efd438b3700 20 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=0 crt=0'0 mlcod 0'0 inactive] set_last_peering_reset 6 2016-03-08 15:19:04.606081 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] Clearing blocked outgoing recovery messages 2016-03-08 15:19:04.606083 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] Not blocking outgoing recovery messages 2016-03-08 15:19:04.606088 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] update_heartbeat_peers -> 0 2016-03-08 15:19:04.606091 7efd438b3700 20 osd.0 6 need_heartbeat_peer_update 2016-03-08 15:19:04.606092 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] take_waiters 2016-03-08 15:19:04.606095 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] exit Reset 0.000023 1 0.000020 2016-03-08 15:19:04.606099 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] enter Started 2016-03-08 15:19:04.606103 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] enter Start 2016-03-08 15:19:04.606106 7efd438b3700 1 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] state: transitioning to Primary 2016-03-08 15:19:04.606110 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] exit Start 0.000007 0 0.000000 2016-03-08 15:19:04.606115 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] enter Started/Primary 2016-03-08 15:19:04.606117 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 creating] enter Started/Primary/Peering 2016-03-08 15:19:04.606123 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 creating+peering] enter Started/Primary/Peering/GetInfo 2016-03-08 
15:19:04.606131 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 creating+peering] generate_past_intervals over epochs 3-6 2016-03-08 15:19:04.606162 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] generate_past_intervals interval(3-5 up [](-1) acting [](-1)) : acting set is too small 2016-03-08 15:19:04.606171 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] PriorSet: build_prior interval(3-5 up [](-1) acting [](-1)) 2016-03-08 15:19:04.606176 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] PriorSet: build_prior final: probe 0 down blocked_by {} 2016-03-08 15:19:04.606182 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] up_thru 0 < same_since 6, must notify monitor 2016-03-08 15:19:04.606186 7efd438b3700 15 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] publish_stats_to_osd 6:1 2016-03-08 15:19:04.606192 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] exit Started/Primary/Peering/GetInfo 0.000069 0 0.000000 2016-03-08 15:19:04.606197 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] enter Started/Primary/Peering/GetLog 2016-03-08 15:19:04.606202 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] calc_acting osd.0 1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) 2016-03-08 15:19:04.606211 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] calc_acting newest update on osd.0 with 1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) up_primary: 0) selected as primary calc_acting primary is osd.0 with 1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) 2016-03-08 15:19:04.606217 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] actingbackfill is 0 2016-03-08 15:19:04.606220 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] choose_acting want [0] (== acting) backfill_targets 2016-03-08 15:19:04.606226 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: leaving GetLog 2016-03-08 15:19:04.606231 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] exit Started/Primary/Peering/GetLog 0.000034 0 0.000000 2016-03-08 15:19:04.606235 7efd438b3700 15 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] publish_stats_to_osd 6: no change since 2016-03-08 15:19:04.606185 2016-03-08 
15:19:04.606242 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] enter Started/Primary/Peering/GetMissing 2016-03-08 15:19:04.606245 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: still need up_thru update before going active 2016-03-08 15:19:04.606250 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] exit Started/Primary/Peering/GetMissing 0.000007 0 0.000000 2016-03-08 15:19:04.606255 7efd438b3700 15 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] publish_stats_to_osd 6: no change since 2016-03-08 15:19:04.606185 2016-03-08 15:19:04.606260 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] enter Started/Primary/Peering/WaitUpThru 2016-03-08 15:19:04.606273 7efd438b3700 10 log is not dirty 2016-03-08 15:19:04.606276 7efd438b3700 15 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] publish_stats_to_osd 6: no change since 2016-03-08 15:19:04.606185 2016-03-08 15:19:04.606307 7efd438b3700 20 osd.0 6 mkpg 1.1 e3@2016-03-08 15:19:02.851204 2016-03-08 15:19:04.606319 7efd438b3700 15 osd.0 6 project_pg_history 1.1 from 3 to 6, start ec=3 les/c/f 0/0/0 0/0/0 2016-03-08 15:19:04.606333 7efd438b3700 15 osd.0 6 project_pg_history 1.1 acting|up changed in 6 from []/[] -1/-1 -> [0]/[0] 0/0 2016-03-08 15:19:04.606335 7efd438b3700 15 osd.0 6 project_pg_history 1.1 up changed in 6 from [] -1 -> [0] 0 2016-03-08 15:19:04.606336 7efd438b3700 15 osd.0 6 project_pg_history 1.1 primary changed in 6 2016-03-08 15:19:04.606337 7efd438b3700 15 osd.0 6 project_pg_history end ec=3 les/c/f 0/0/0 6/6/6 2016-03-08 15:19:04.606347 7efd438b3700 20 osd.0 6 _create_lock_pg pgid 1.1 2016-03-08 15:19:04.606347 7efd438b3700 10 osd.0 6 _open_lock_pg 1.1 2016-03-08 15:19:04.606349 7efd438b3700 10 osd.0 6 _get_pool 1 2016-03-08 15:19:04.606358 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.1(unlocked)] enter Initial 2016-03-08 15:19:04.606363 7efd438b3700 20 osd.0 pg_epoch: 6 pg[1.1(unlocked)] enter NotTrimming 2016-03-08 15:19:04.606366 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.1( DNE empty local-les=0 n=0 ec=0 les/c/f 0/0/0 0/0/0) [] r=0 lpr=0 crt=0'0 inactive NIBBLEWISE] init role 0 up [0] acting [0] history ec=3 les/c/f 0/0/0 6/6/6 0 past_intervals 2016-03-08 15:19:04.606376 7efd438b3700 5 write_log with: dirty_to: 0'0, dirty_from: 4294967295'18446744073709551615, dirty_divergent_priors: false, divergent_priors: 0, writeout_from: 4294967295'18446744073709551615, trimmed: 2016-03-08 15:19:04.606380 7efd438b3700 7 osd.0 6 _create_lock_pg pg[1.1( empty local-les=0 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=0 crt=0'0 mlcod 0'0 inactive] 2016-03-08 15:19:04.606382 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=0 crt=0'0 mlcod 0'0 inactive] handle_create 2016-03-08 15:19:04.606386 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=0 crt=0'0 mlcod 0'0 inactive] exit Initial 0.000028 0 0.000000 2016-03-08 15:19:04.606393 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 
n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=0 crt=0'0 mlcod 0'0 inactive] enter Reset 2016-03-08 15:19:04.606399 7efd438b3700 20 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=0 crt=0'0 mlcod 0'0 inactive] set_last_peering_reset 6 2016-03-08 15:19:04.606403 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] Clearing blocked outgoing recovery messages 2016-03-08 15:19:04.606407 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] Not blocking outgoing recovery messages 2016-03-08 15:19:04.606413 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] update_heartbeat_peers -> 0 2016-03-08 15:19:04.606418 7efd438b3700 20 osd.0 6 need_heartbeat_peer_update 2016-03-08 15:19:04.606419 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] take_waiters 2016-03-08 15:19:04.606424 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] exit Reset 0.000031 1 0.000026 2016-03-08 15:19:04.606427 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] enter Started 2016-03-08 15:19:04.606430 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] enter Start 2016-03-08 15:19:04.606432 7efd438b3700 1 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] state: transitioning to Primary 2016-03-08 15:19:04.606435 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] exit Start 0.000005 0 0.000000 2016-03-08 15:19:04.606438 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] enter Started/Primary 2016-03-08 15:19:04.606441 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 creating] enter Started/Primary/Peering 2016-03-08 15:19:04.606443 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 creating+peering] enter Started/Primary/Peering/GetInfo 2016-03-08 15:19:04.606448 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 creating+peering] generate_past_intervals over epochs 3-6 2016-03-08 15:19:04.606474 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] generate_past_intervals interval(3-5 up [](-1) acting [](-1)) : acting set is too small 2016-03-08 15:19:04.606478 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] PriorSet: build_prior interval(3-5 up [](-1) acting [](-1)) 2016-03-08 15:19:04.606481 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] PriorSet: build_prior final: probe 0 down blocked_by {} 2016-03-08 15:19:04.606485 
7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] up_thru 0 < same_since 6, must notify monitor 2016-03-08 15:19:04.606489 7efd438b3700 15 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] publish_stats_to_osd 6:1 2016-03-08 15:19:04.606493 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] exit Started/Primary/Peering/GetInfo 0.000049 0 0.000000 2016-03-08 15:19:04.606497 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] enter Started/Primary/Peering/GetLog 2016-03-08 15:19:04.606500 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] calc_acting osd.0 1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) 2016-03-08 15:19:04.606507 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] calc_acting newest update on osd.0 with 1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) up_primary: 0) selected as primary calc_acting primary is osd.0 with 1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) 2016-03-08 15:19:04.606511 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] actingbackfill is 0 2016-03-08 15:19:04.606515 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] choose_acting want [0] (== acting) backfill_targets 2016-03-08 15:19:04.606518 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: leaving GetLog 2016-03-08 15:19:04.606523 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] exit Started/Primary/Peering/GetLog 0.000025 0 0.000000 2016-03-08 15:19:04.606527 7efd438b3700 15 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] publish_stats_to_osd 6: no change since 2016-03-08 15:19:04.606489 2016-03-08 15:19:04.606533 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] enter Started/Primary/Peering/GetMissing 2016-03-08 15:19:04.606536 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: still need up_thru update before going active 2016-03-08 15:19:04.606539 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] exit Started/Primary/Peering/GetMissing 0.000006 0 0.000000 2016-03-08 15:19:04.606547 7efd438b3700 15 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] publish_stats_to_osd 6: no change since 2016-03-08 15:19:04.606489 2016-03-08 15:19:04.606556 7efd438b3700 5 osd.0 pg_epoch: 
6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] enter Started/Primary/Peering/WaitUpThru 2016-03-08 15:19:04.606569 7efd438b3700 10 log is not dirty 2016-03-08 15:19:04.606572 7efd438b3700 15 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] publish_stats_to_osd 6: no change since 2016-03-08 15:19:04.606489 2016-03-08 15:19:04.606595 7efd438b3700 20 osd.0 6 mkpg 1.2 e3@2016-03-08 15:19:02.851208 2016-03-08 15:19:04.606607 7efd438b3700 15 osd.0 6 project_pg_history 1.2 from 3 to 6, start ec=3 les/c/f 0/0/0 0/0/0 2016-03-08 15:19:04.606620 7efd438b3700 15 osd.0 6 project_pg_history 1.2 acting|up changed in 6 from []/[] -1/-1 -> [0]/[0] 0/0 2016-03-08 15:19:04.606622 7efd438b3700 15 osd.0 6 project_pg_history 1.2 up changed in 6 from [] -1 -> [0] 0 2016-03-08 15:19:04.606623 7efd438b3700 15 osd.0 6 project_pg_history 1.2 primary changed in 6 2016-03-08 15:19:04.606624 7efd438b3700 15 osd.0 6 project_pg_history end ec=3 les/c/f 0/0/0 6/6/6 2016-03-08 15:19:04.606633 7efd438b3700 20 osd.0 6 _create_lock_pg pgid 1.2 2016-03-08 15:19:04.606634 7efd438b3700 10 osd.0 6 _open_lock_pg 1.2 2016-03-08 15:19:04.606635 7efd438b3700 10 osd.0 6 _get_pool 1 2016-03-08 15:19:04.606647 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.2(unlocked)] enter Initial 2016-03-08 15:19:04.606652 7efd438b3700 20 osd.0 pg_epoch: 6 pg[1.2(unlocked)] enter NotTrimming 2016-03-08 15:19:04.606654 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.2( DNE empty local-les=0 n=0 ec=0 les/c/f 0/0/0 0/0/0) [] r=0 lpr=0 crt=0'0 inactive NIBBLEWISE] init role 0 up [0] acting [0] history ec=3 les/c/f 0/0/0 6/6/6 0 past_intervals 2016-03-08 15:19:04.606663 7efd438b3700 5 write_log with: dirty_to: 0'0, dirty_from: 4294967295'18446744073709551615, dirty_divergent_priors: false, divergent_priors: 0, writeout_from: 4294967295'18446744073709551615, trimmed: 2016-03-08 15:19:04.606666 7efd438b3700 7 osd.0 6 _create_lock_pg pg[1.2( empty local-les=0 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=0 crt=0'0 mlcod 0'0 inactive] 2016-03-08 15:19:04.606670 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=0 crt=0'0 mlcod 0'0 inactive] handle_create 2016-03-08 15:19:04.606674 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=0 crt=0'0 mlcod 0'0 inactive] exit Initial 0.000026 0 0.000000 2016-03-08 15:19:04.606677 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=0 crt=0'0 mlcod 0'0 inactive] enter Reset 2016-03-08 15:19:04.606681 7efd438b3700 20 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=0 crt=0'0 mlcod 0'0 inactive] set_last_peering_reset 6 2016-03-08 15:19:04.606683 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] Clearing blocked outgoing recovery messages 2016-03-08 15:19:04.606685 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] Not blocking outgoing recovery messages 2016-03-08 15:19:04.606688 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] update_heartbeat_peers -> 0 2016-03-08 15:19:04.606692 7efd438b3700 20 osd.0 6 need_heartbeat_peer_update 2016-03-08 15:19:04.606692 
7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] take_waiters 2016-03-08 15:19:04.606695 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] exit Reset 0.000017 1 0.000014 2016-03-08 15:19:04.606698 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] enter Started 2016-03-08 15:19:04.606700 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] enter Start 2016-03-08 15:19:04.606703 7efd438b3700 1 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] state: transitioning to Primary 2016-03-08 15:19:04.606707 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] exit Start 0.000006 0 0.000000 2016-03-08 15:19:04.606711 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] enter Started/Primary 2016-03-08 15:19:04.606714 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 creating] enter Started/Primary/Peering 2016-03-08 15:19:04.606717 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 creating+peering] enter Started/Primary/Peering/GetInfo 2016-03-08 15:19:04.606724 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 creating+peering] generate_past_intervals over epochs 3-6 2016-03-08 15:19:04.606750 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] generate_past_intervals interval(3-5 up [](-1) acting [](-1)) : acting set is too small 2016-03-08 15:19:04.606755 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] PriorSet: build_prior interval(3-5 up [](-1) acting [](-1)) 2016-03-08 15:19:04.606758 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] PriorSet: build_prior final: probe 0 down blocked_by {} 2016-03-08 15:19:04.606760 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] up_thru 0 < same_since 6, must notify monitor 2016-03-08 15:19:04.606764 7efd438b3700 15 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] publish_stats_to_osd 6:1 2016-03-08 15:19:04.606768 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] exit Started/Primary/Peering/GetInfo 0.000051 0 0.000000 2016-03-08 15:19:04.606773 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] enter Started/Primary/Peering/GetLog 2016-03-08 15:19:04.606778 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] 
r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] calc_acting osd.0 1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) 2016-03-08 15:19:04.606785 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] calc_acting newest update on osd.0 with 1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) up_primary: 0) selected as primary calc_acting primary is osd.0 with 1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) 2016-03-08 15:19:04.606788 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] actingbackfill is 0 2016-03-08 15:19:04.606791 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] choose_acting want [0] (== acting) backfill_targets 2016-03-08 15:19:04.606794 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: leaving GetLog 2016-03-08 15:19:04.606798 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] exit Started/Primary/Peering/GetLog 0.000025 0 0.000000 2016-03-08 15:19:04.606802 7efd438b3700 15 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] publish_stats_to_osd 6: no change since 2016-03-08 15:19:04.606764 2016-03-08 15:19:04.606808 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] enter Started/Primary/Peering/GetMissing 2016-03-08 15:19:04.606810 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: still need up_thru update before going active 2016-03-08 15:19:04.606815 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] exit Started/Primary/Peering/GetMissing 0.000007 0 0.000000 2016-03-08 15:19:04.606819 7efd438b3700 15 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] publish_stats_to_osd 6: no change since 2016-03-08 15:19:04.606764 2016-03-08 15:19:04.606823 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] enter Started/Primary/Peering/WaitUpThru 2016-03-08 15:19:04.606836 7efd438b3700 10 log is not dirty 2016-03-08 15:19:04.606839 7efd438b3700 15 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] publish_stats_to_osd 6: no change since 2016-03-08 15:19:04.606764 2016-03-08 15:19:04.606857 7efd438b3700 20 osd.0 6 mkpg 1.3 e3@2016-03-08 15:19:02.851210 2016-03-08 15:19:04.606869 7efd438b3700 15 osd.0 6 project_pg_history 1.3 from 3 to 6, start ec=3 les/c/f 0/0/0 0/0/0 2016-03-08 15:19:04.606883 7efd438b3700 15 osd.0 6 project_pg_history 1.3 acting|up changed in 6 from []/[] -1/-1 -> [0]/[0] 0/0 2016-03-08 15:19:04.606885 7efd438b3700 15 osd.0 6 project_pg_history 1.3 up changed in 6 from [] -1 -> [0] 0 2016-03-08 15:19:04.606886 7efd438b3700 15 osd.0 6 
project_pg_history 1.3 primary changed in 6 2016-03-08 15:19:04.606887 7efd438b3700 15 osd.0 6 project_pg_history end ec=3 les/c/f 0/0/0 6/6/6 2016-03-08 15:19:04.606895 7efd438b3700 20 osd.0 6 _create_lock_pg pgid 1.3 2016-03-08 15:19:04.606897 7efd438b3700 10 osd.0 6 _open_lock_pg 1.3 2016-03-08 15:19:04.606898 7efd438b3700 10 osd.0 6 _get_pool 1 2016-03-08 15:19:04.606907 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.3(unlocked)] enter Initial 2016-03-08 15:19:04.606911 7efd438b3700 20 osd.0 pg_epoch: 6 pg[1.3(unlocked)] enter NotTrimming 2016-03-08 15:19:04.606914 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.3( DNE empty local-les=0 n=0 ec=0 les/c/f 0/0/0 0/0/0) [] r=0 lpr=0 crt=0'0 inactive NIBBLEWISE] init role 0 up [0] acting [0] history ec=3 les/c/f 0/0/0 6/6/6 0 past_intervals 2016-03-08 15:19:04.606923 7efd438b3700 5 write_log with: dirty_to: 0'0, dirty_from: 4294967295'18446744073709551615, dirty_divergent_priors: false, divergent_priors: 0, writeout_from: 4294967295'18446744073709551615, trimmed: 2016-03-08 15:19:04.606926 7efd438b3700 7 osd.0 6 _create_lock_pg pg[1.3( empty local-les=0 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=0 crt=0'0 mlcod 0'0 inactive] 2016-03-08 15:19:04.606928 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=0 crt=0'0 mlcod 0'0 inactive] handle_create 2016-03-08 15:19:04.606931 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=0 crt=0'0 mlcod 0'0 inactive] exit Initial 0.000024 0 0.000000 2016-03-08 15:19:04.606936 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=0 crt=0'0 mlcod 0'0 inactive] enter Reset 2016-03-08 15:19:04.606938 7efd438b3700 20 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=0 crt=0'0 mlcod 0'0 inactive] set_last_peering_reset 6 2016-03-08 15:19:04.606942 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] Clearing blocked outgoing recovery messages 2016-03-08 15:19:04.606944 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] Not blocking outgoing recovery messages 2016-03-08 15:19:04.606947 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] update_heartbeat_peers -> 0 2016-03-08 15:19:04.606953 7efd438b3700 20 osd.0 6 need_heartbeat_peer_update 2016-03-08 15:19:04.606954 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] take_waiters 2016-03-08 15:19:04.606957 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] exit Reset 0.000021 1 0.000015 2016-03-08 15:19:04.606960 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] enter Started 2016-03-08 15:19:04.606962 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] enter Start 2016-03-08 15:19:04.606964 7efd438b3700 1 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] state: transitioning to Primary 2016-03-08 15:19:04.606967 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 
0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] exit Start 0.000005 0 0.000000 2016-03-08 15:19:04.606970 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 inactive] enter Started/Primary 2016-03-08 15:19:04.606973 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 creating] enter Started/Primary/Peering 2016-03-08 15:19:04.606975 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 creating+peering] enter Started/Primary/Peering/GetInfo 2016-03-08 15:19:04.606980 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 creating+peering] generate_past_intervals over epochs 3-6 2016-03-08 15:19:04.607007 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] generate_past_intervals interval(3-5 up [](-1) acting [](-1)) : acting set is too small 2016-03-08 15:19:04.607012 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] PriorSet: build_prior interval(3-5 up [](-1) acting [](-1)) 2016-03-08 15:19:04.607017 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] PriorSet: build_prior final: probe 0 down blocked_by {} 2016-03-08 15:19:04.607019 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] up_thru 0 < same_since 6, must notify monitor 2016-03-08 15:19:04.607023 7efd438b3700 15 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] publish_stats_to_osd 6:1 2016-03-08 15:19:04.607027 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] exit Started/Primary/Peering/GetInfo 0.000052 0 0.000000 2016-03-08 15:19:04.607032 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] enter Started/Primary/Peering/GetLog 2016-03-08 15:19:04.607036 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] calc_acting osd.0 1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) 2016-03-08 15:19:04.607043 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] calc_acting newest update on osd.0 with 1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) up_primary: 0) selected as primary calc_acting primary is osd.0 with 1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) 2016-03-08 15:19:04.607046 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] actingbackfill is 0 2016-03-08 15:19:04.607049 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] choose_acting want [0] (== acting) backfill_targets 2016-03-08 15:19:04.607052 
7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: leaving GetLog 2016-03-08 15:19:04.607057 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] exit Started/Primary/Peering/GetLog 0.000024 0 0.000000 2016-03-08 15:19:04.607061 7efd438b3700 15 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] publish_stats_to_osd 6: no change since 2016-03-08 15:19:04.607022 2016-03-08 15:19:04.607066 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] enter Started/Primary/Peering/GetMissing 2016-03-08 15:19:04.607070 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: still need up_thru update before going active 2016-03-08 15:19:04.607074 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] exit Started/Primary/Peering/GetMissing 0.000007 0 0.000000 2016-03-08 15:19:04.607077 7efd438b3700 15 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] publish_stats_to_osd 6: no change since 2016-03-08 15:19:04.607022 2016-03-08 15:19:04.607081 7efd438b3700 5 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] enter Started/Primary/Peering/WaitUpThru 2016-03-08 15:19:04.607094 7efd438b3700 10 log is not dirty 2016-03-08 15:19:04.607097 7efd438b3700 15 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] publish_stats_to_osd 6: no change since 2016-03-08 15:19:04.607022 2016-03-08 15:19:04.607121 7efd438b3700 10 osd.0 6 maybe_update_heartbeat_peers updating 2016-03-08 15:19:04.607123 7efd438b3700 20 osd.0 6 1.3 heartbeat_peers 0 2016-03-08 15:19:04.607124 7efd438b3700 20 osd.0 6 1.2 heartbeat_peers 0 2016-03-08 15:19:04.607124 7efd438b3700 20 osd.0 6 1.1 heartbeat_peers 0 2016-03-08 15:19:04.607129 7efd438b3700 20 osd.0 6 1.0 heartbeat_peers 0 2016-03-08 15:19:04.607130 7efd438b3700 10 osd.0 6 maybe_update_heartbeat_peers 0 peers, extras 2016-03-08 15:19:04.607138 7efd438b3700 10 osd.0 6 do_waiters -- start 2016-03-08 15:19:04.607138 7efd438b3700 10 osd.0 6 do_waiters -- finish 2016-03-08 15:19:04.625065 7efd46a28700 10 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] flushed 2016-03-08 15:19:04.625119 7efd3a0a0700 10 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] handle_peering_event: epoch_sent: 6 epoch_requested: 6 FlushedEvt 2016-03-08 15:19:04.625137 7efd3a0a0700 15 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] requeue_ops 2016-03-08 15:19:04.625143 7efd3a0a0700 10 log is not dirty 2016-03-08 15:19:04.625144 7efd3a0a0700 10 osd.0 6 queue_want_up_thru now 6 (was 0), currently 0 2016-03-08 15:19:04.625146 7efd3a0a0700 10 osd.0 6 send_alive up_thru 
currently 0 want 6 2016-03-08 15:19:04.625147 7efd3a0a0700 10 osd.0 6 send_alive want 6 2016-03-08 15:19:04.637784 7efd46a28700 10 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] flushed 2016-03-08 15:19:04.637827 7efd3a8a1700 10 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] handle_peering_event: epoch_sent: 6 epoch_requested: 6 FlushedEvt 2016-03-08 15:19:04.637865 7efd3a8a1700 15 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] requeue_ops 2016-03-08 15:19:04.637878 7efd3a8a1700 10 log is not dirty 2016-03-08 15:19:04.637881 7efd3a8a1700 10 osd.0 6 queue_want_up_thru want 6 <= queued 6, currently 0 2016-03-08 15:19:04.643892 7efd46a28700 10 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] flushed 2016-03-08 15:19:04.643920 7efd3a0a0700 10 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] handle_peering_event: epoch_sent: 6 epoch_requested: 6 FlushedEvt 2016-03-08 15:19:04.643944 7efd3a0a0700 15 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] requeue_ops 2016-03-08 15:19:04.643968 7efd3a0a0700 10 log is not dirty 2016-03-08 15:19:04.643971 7efd3a0a0700 10 osd.0 6 queue_want_up_thru want 6 <= queued 6, currently 0 2016-03-08 15:19:04.650209 7efd46a28700 10 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] flushed 2016-03-08 15:19:04.650259 7efd3a8a1700 10 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] handle_peering_event: epoch_sent: 6 epoch_requested: 6 FlushedEvt 2016-03-08 15:19:04.650279 7efd3a8a1700 15 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] requeue_ops 2016-03-08 15:19:04.650289 7efd3a8a1700 10 log is not dirty 2016-03-08 15:19:04.650291 7efd3a8a1700 10 osd.0 6 queue_want_up_thru want 6 <= queued 6, currently 0 2016-03-08 15:19:04.725209 7efd438b3700 10 osd.0 6 do_waiters -- start 2016-03-08 15:19:04.725211 7efd438b3700 10 osd.0 6 do_waiters -- finish 2016-03-08 15:19:04.725212 7efd438b3700 20 osd.0 6 _dispatch 0x5643b4eec800 osd_map(6..7 src has 1..7) v3 2016-03-08 15:19:04.725232 7efd438b3700 3 osd.0 6 handle_osd_map epochs [6,7], i have 6, src has [1,7] 2016-03-08 15:19:04.725234 7efd438b3700 10 osd.0 6 handle_osd_map got inc map for epoch 7 2016-03-08 15:19:04.725304 7efd438b3700 20 osd.0 6 got_full_map 7, nothing requested 2016-03-08 15:19:04.725334 7efd438b3700 10 osd.0 6 advance to epoch 7 (<= newest 7) 2016-03-08 15:19:04.725338 7efd438b3700 7 osd.0 7 advance_map epoch 7 2016-03-08 15:19:04.725341 7efd438b3700 10 osd.0 7 write_superblock sb(c4878902-7748-4a77-afab-07655d3b0201 osd.0 ad2b7a59-c78c-449e-86c6-e537c3b12550 e7 [1,7] lci=[6,7]) 2016-03-08 15:19:04.725389 7efd438b3700 7 osd.0 7 consume_map version 7 2016-03-08 15:19:04.725400 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] null 2016-03-08 15:19:04.725422 
7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] null 2016-03-08 15:19:04.725435 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] null 2016-03-08 15:19:04.725444 7efd438b3700 10 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] null 2016-03-08 15:19:04.725454 7efd438b3700 7 osd.0 7 activate_map version 7 2016-03-08 15:19:04.725463 7efd438b3700 10 osd.0 7 do_waiters -- start 2016-03-08 15:19:04.725464 7efd438b3700 10 osd.0 7 do_waiters -- finish 2016-03-08 15:19:04.725478 7efd438b3700 10 osd.0 7 do_waiters -- start 2016-03-08 15:19:04.725479 7efd438b3700 10 osd.0 7 do_waiters -- finish 2016-03-08 15:19:04.725479 7efd438b3700 20 osd.0 7 _dispatch 0x5643b4eecd00 osd_map(7..7 src has 1..7) v3 2016-03-08 15:19:04.725483 7efd438b3700 3 osd.0 7 handle_osd_map epochs [7,7], i have 7, src has [1,7] 2016-03-08 15:19:04.725485 7efd438b3700 10 osd.0 7 no new maps here, dropping 2016-03-08 15:19:04.725486 7efd438b3700 10 osd.0 7 do_waiters -- start 2016-03-08 15:19:04.725486 7efd438b3700 10 osd.0 7 do_waiters -- finish 2016-03-08 15:19:04.725492 7efd3088d700 20 osd.0 7 agent_entry empty queue 2016-03-08 15:19:04.725524 7efd3a8a1700 10 osd.0 pg_epoch: 6 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] handle_advance_map [0]/[0] -- 0/0 2016-03-08 15:19:04.725539 7efd3a8a1700 20 PGPool::update cached_removed_snaps [] newly_removed_snaps [] snapc 0=[] (no change) 2016-03-08 15:19:04.725545 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: Peering advmap 2016-03-08 15:19:04.725554 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] adjust_need_up_thru now 6, need_up_thru now false 2016-03-08 15:19:04.725563 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: Started advmap 2016-03-08 15:19:04.725579 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] check_recovery_sources no source osds () went down 2016-03-08 15:19:04.725593 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] handle_activate_map 2016-03-08 15:19:04.725599 7efd3a8a1700 7 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: handle ActMap primary 2016-03-08 15:19:04.725605 7efd3a8a1700 15 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] publish_stats_to_osd 6: no change since 2016-03-08 15:19:04.607022 2016-03-08 15:19:04.725614 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] take_waiters 2016-03-08 15:19:04.725619 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 
0'0 creating+peering] exit Started/Primary/Peering/WaitUpThru 0.118537 3 0.000195 2016-03-08 15:19:04.725626 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: Leaving Peering 2016-03-08 15:19:04.725630 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] exit Started/Primary/Peering 0.118657 0 0.000000 2016-03-08 15:19:04.725636 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] enter Started/Primary/Active 2016-03-08 15:19:04.725640 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] state: In Active, about to call activate 2016-03-08 15:19:04.725656 7efd3a8a1700 20 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] activate - purged_snaps [] cached_removed_snaps [] 2016-03-08 15:19:04.725661 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] activate - snap_trimq [] 2016-03-08 15:19:04.725664 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] activate - no missing, moving last_complete 0'0 -> 0'0 2016-03-08 15:19:04.725669 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] needs_recovery is recovered 2016-03-08 15:19:04.725680 7efd3a8a1700 15 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] publish_stats_to_osd 7:2 2016-03-08 15:19:04.725686 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] state: Activate Finished 2016-03-08 15:19:04.725691 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] enter Started/Primary/Active/Activating 2016-03-08 15:19:04.725695 7efd3a8a1700 20 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] handle_activate_map: Not dirtying info: last_persisted is 6 while current is 7 2016-03-08 15:19:04.725702 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] handle_peering_event: epoch_sent: 7 epoch_requested: 7 NullEvt 2016-03-08 15:19:04.725725 7efd3a8a1700 10 log is not dirty 2016-03-08 15:19:04.725770 7efd3a8a1700 10 osd.0 pg_epoch: 6 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] handle_advance_map [0]/[0] -- 0/0 2016-03-08 15:19:04.725781 7efd3a8a1700 20 PGPool::update cached_removed_snaps [] newly_removed_snaps [] snapc 0=[] (no change) 2016-03-08 15:19:04.725785 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: 
Peering advmap 2016-03-08 15:19:04.725793 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] adjust_need_up_thru now 6, need_up_thru now false 2016-03-08 15:19:04.725799 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: Started advmap 2016-03-08 15:19:04.725810 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] check_recovery_sources no source osds () went down 2016-03-08 15:19:04.725819 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] handle_activate_map 2016-03-08 15:19:04.725824 7efd3a8a1700 7 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: handle ActMap primary 2016-03-08 15:19:04.725829 7efd3a8a1700 15 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] publish_stats_to_osd 6: no change since 2016-03-08 15:19:04.606764 2016-03-08 15:19:04.725835 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] take_waiters 2016-03-08 15:19:04.725840 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] exit Started/Primary/Peering/WaitUpThru 0.119016 3 0.000197 2016-03-08 15:19:04.725844 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: Leaving Peering 2016-03-08 15:19:04.725848 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] exit Started/Primary/Peering 0.119133 0 0.000000 2016-03-08 15:19:04.725853 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] enter Started/Primary/Active 2016-03-08 15:19:04.725857 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] state: In Active, about to call activate 2016-03-08 15:19:04.725863 7efd3a8a1700 20 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] activate - purged_snaps [] cached_removed_snaps [] 2016-03-08 15:19:04.725866 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] activate - snap_trimq [] 2016-03-08 15:19:04.725869 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] activate - no missing, moving last_complete 0'0 -> 0'0 2016-03-08 15:19:04.725880 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] needs_recovery is recovered 2016-03-08 15:19:04.725884 7efd3a8a1700 15 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 
creating+activating+undersized+degraded] publish_stats_to_osd 7:2 2016-03-08 15:19:04.725888 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] state: Activate Finished 2016-03-08 15:19:04.725892 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] enter Started/Primary/Active/Activating 2016-03-08 15:19:04.725896 7efd3a8a1700 20 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] handle_activate_map: Not dirtying info: last_persisted is 6 while current is 7 2016-03-08 15:19:04.725899 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] handle_peering_event: epoch_sent: 7 epoch_requested: 7 NullEvt 2016-03-08 15:19:04.725911 7efd3a8a1700 10 log is not dirty 2016-03-08 15:19:04.725942 7efd3a8a1700 10 osd.0 pg_epoch: 6 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] handle_advance_map [0]/[0] -- 0/0 2016-03-08 15:19:04.725963 7efd3a8a1700 20 PGPool::update cached_removed_snaps [] newly_removed_snaps [] snapc 0=[] (no change) 2016-03-08 15:19:04.725965 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: Peering advmap 2016-03-08 15:19:04.725969 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] adjust_need_up_thru now 6, need_up_thru now false 2016-03-08 15:19:04.725973 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: Started advmap 2016-03-08 15:19:04.725977 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] check_recovery_sources no source osds () went down 2016-03-08 15:19:04.725982 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] handle_activate_map 2016-03-08 15:19:04.725986 7efd3a8a1700 7 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: handle ActMap primary 2016-03-08 15:19:04.725990 7efd3a8a1700 15 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] publish_stats_to_osd 6: no change since 2016-03-08 15:19:04.606489 2016-03-08 15:19:04.725996 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] take_waiters 2016-03-08 15:19:04.726000 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] exit Started/Primary/Peering/WaitUpThru 0.119448 3 0.000187 2016-03-08 15:19:04.726004 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 
creating+peering] state: Leaving Peering 2016-03-08 15:19:04.726014 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] exit Started/Primary/Peering 0.119573 0 0.000000 2016-03-08 15:19:04.726019 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] enter Started/Primary/Active 2016-03-08 15:19:04.726023 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] state: In Active, about to call activate 2016-03-08 15:19:04.726028 7efd3a8a1700 20 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] activate - purged_snaps [] cached_removed_snaps [] 2016-03-08 15:19:04.726031 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] activate - snap_trimq [] 2016-03-08 15:19:04.726034 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] activate - no missing, moving last_complete 0'0 -> 0'0 2016-03-08 15:19:04.726038 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] needs_recovery is recovered 2016-03-08 15:19:04.726042 7efd3a8a1700 15 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] publish_stats_to_osd 7:2 2016-03-08 15:19:04.726045 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] state: Activate Finished 2016-03-08 15:19:04.726049 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] enter Started/Primary/Active/Activating 2016-03-08 15:19:04.726053 7efd3a8a1700 20 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] handle_activate_map: Not dirtying info: last_persisted is 6 while current is 7 2016-03-08 15:19:04.726056 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] handle_peering_event: epoch_sent: 7 epoch_requested: 7 NullEvt 2016-03-08 15:19:04.726067 7efd3a8a1700 10 log is not dirty 2016-03-08 15:19:04.726100 7efd3a8a1700 10 osd.0 pg_epoch: 6 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] handle_advance_map [0]/[0] -- 0/0 2016-03-08 15:19:04.726111 7efd3a8a1700 20 PGPool::update cached_removed_snaps [] newly_removed_snaps [] snapc 0=[] (no change) 2016-03-08 15:19:04.726114 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: Peering advmap 2016-03-08 15:19:04.726120 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] adjust_need_up_thru now 6, need_up_thru now false 
2016-03-08 15:19:04.726126 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: Started advmap 2016-03-08 15:19:04.726131 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] check_recovery_sources no source osds () went down 2016-03-08 15:19:04.726135 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] handle_activate_map 2016-03-08 15:19:04.726139 7efd3a8a1700 7 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: handle ActMap primary 2016-03-08 15:19:04.726143 7efd3a8a1700 15 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] publish_stats_to_osd 6: no change since 2016-03-08 15:19:04.606185 2016-03-08 15:19:04.726149 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] take_waiters 2016-03-08 15:19:04.726154 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] exit Started/Primary/Peering/WaitUpThru 0.119893 3 0.000209 2016-03-08 15:19:04.726158 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] state: Leaving Peering 2016-03-08 15:19:04.726161 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+peering] exit Started/Primary/Peering 0.120043 0 0.000000 2016-03-08 15:19:04.726166 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] enter Started/Primary/Active 2016-03-08 15:19:04.726170 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] state: In Active, about to call activate 2016-03-08 15:19:04.726175 7efd3a8a1700 20 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] activate - purged_snaps [] cached_removed_snaps [] 2016-03-08 15:19:04.726179 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] activate - snap_trimq [] 2016-03-08 15:19:04.726182 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] activate - no missing, moving last_complete 0'0 -> 0'0 2016-03-08 15:19:04.726186 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating] needs_recovery is recovered 2016-03-08 15:19:04.726190 7efd3a8a1700 15 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] publish_stats_to_osd 7:2 2016-03-08 15:19:04.726194 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 
creating+activating+undersized+degraded] state: Activate Finished 2016-03-08 15:19:04.726197 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] enter Started/Primary/Active/Activating 2016-03-08 15:19:04.726201 7efd3a8a1700 20 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] handle_activate_map: Not dirtying info: last_persisted is 6 while current is 7 2016-03-08 15:19:04.726204 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] handle_peering_event: epoch_sent: 7 epoch_requested: 7 NullEvt 2016-03-08 15:19:04.726215 7efd3a8a1700 10 log is not dirty 2016-03-08 15:19:04.742887 7efd48a2c700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] _activate_committed 7 peer_activated now 0 last_epoch_started 0 same_interval_since 6 2016-03-08 15:19:04.742914 7efd48a2c700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] all_activated_and_committed 2016-03-08 15:19:04.742932 7efd48a2c700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] flushed 2016-03-08 15:19:04.742947 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] handle_peering_event: epoch_sent: 7 epoch_requested: 7 AllReplicasActivated 2016-03-08 15:19:04.742947 7efd48a2c700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] _activate_committed 7 peer_activated now 0 last_epoch_started 0 same_interval_since 6 2016-03-08 15:19:04.742971 7efd48a2c700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] all_activated_and_committed 2016-03-08 15:19:04.742973 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] share_pg_info 2016-03-08 15:19:04.742981 7efd3a8a1700 15 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] publish_stats_to_osd 7:3 2016-03-08 15:19:04.742985 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] check_local 2016-03-08 15:19:04.742984 7efd48a2c700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] flushed 2016-03-08 15:19:04.742994 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] needs_recovery is recovered 2016-03-08 15:19:04.742996 7efd48a2c700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 
6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] _activate_committed 7 peer_activated now 0 last_epoch_started 0 same_interval_since 6 2016-03-08 15:19:04.743002 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] needs_backfill does not need backfill 2016-03-08 15:19:04.743007 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] activate all replicas clean, no recovery 2016-03-08 15:19:04.743005 7efd48a2c700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] all_activated_and_committed 2016-03-08 15:19:04.743017 7efd48a2c700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] flushed 2016-03-08 15:19:04.743015 7efd3a8a1700 15 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] publish_stats_to_osd 7: no change since 2016-03-08 15:19:04.742978 2016-03-08 15:19:04.743024 7efd3a8a1700 20 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] hit_set_clear 2016-03-08 15:19:04.743028 7efd3a8a1700 20 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] agent_stop 2016-03-08 15:19:04.743034 7efd3a8a1700 10 log is not dirty 2016-03-08 15:19:04.743027 7efd48a2c700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] _activate_committed 7 peer_activated now 0 last_epoch_started 0 same_interval_since 6 2016-03-08 15:19:04.743035 7efd48a2c700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] all_activated_and_committed 2016-03-08 15:19:04.743043 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] handle_peering_event: epoch_sent: 7 epoch_requested: 7 FlushedEvt 2016-03-08 15:19:04.743044 7efd48a2c700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] flushed 2016-03-08 15:19:04.743051 7efd3a8a1700 15 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] requeue_ops 2016-03-08 15:19:04.743056 7efd3a8a1700 10 log is not dirty 2016-03-08 15:19:04.743058 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] handle_peering_event: epoch_sent: 7 epoch_requested: 7 AllReplicasActivated 2016-03-08 15:19:04.743062 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] share_pg_info 2016-03-08 15:19:04.743064 7efd3a0a0700 10 osd.0 pg_epoch: 7 pg[1.0( empty 
local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] handle_peering_event: epoch_sent: 7 epoch_requested: 7 AllReplicasActivated 2016-03-08 15:19:04.743068 7efd3a8a1700 15 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] publish_stats_to_osd 7:3 2016-03-08 15:19:04.743077 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] check_local 2016-03-08 15:19:04.743081 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] needs_recovery is recovered 2016-03-08 15:19:04.743078 7efd3a0a0700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] share_pg_info 2016-03-08 15:19:04.743084 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] needs_backfill does not need backfill 2016-03-08 15:19:04.743088 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] activate all replicas clean, no recovery 2016-03-08 15:19:04.743088 7efd3a0a0700 15 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] publish_stats_to_osd 7:3 2016-03-08 15:19:04.743094 7efd3a8a1700 15 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] publish_stats_to_osd 7: no change since 2016-03-08 15:19:04.743067 2016-03-08 15:19:04.743096 7efd3a0a0700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] check_local 2016-03-08 15:19:04.743100 7efd3a8a1700 20 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] hit_set_clear 2016-03-08 15:19:04.743104 7efd3a8a1700 20 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] agent_stop 2016-03-08 15:19:04.743108 7efd3a8a1700 10 log is not dirty 2016-03-08 15:19:04.743103 7efd3a0a0700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] needs_recovery is recovered 2016-03-08 15:19:04.743109 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] handle_peering_event: epoch_sent: 7 epoch_requested: 7 FlushedEvt 2016-03-08 15:19:04.743114 7efd3a8a1700 15 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] requeue_ops 2016-03-08 15:19:04.743111 7efd3a0a0700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] needs_backfill does not need backfill 2016-03-08 15:19:04.743118 7efd3a8a1700 10 log is not dirty 2016-03-08 
15:19:04.743119 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] handle_peering_event: epoch_sent: 7 epoch_requested: 7 AllReplicasRecovered 2016-03-08 15:19:04.743118 7efd3a0a0700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] activate all replicas clean, no recovery 2016-03-08 15:19:04.743124 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] exit Started/Primary/Active/Activating 0.017433 4 0.000169 2016-03-08 15:19:04.743128 7efd3a0a0700 15 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] publish_stats_to_osd 7: no change since 2016-03-08 15:19:04.743086 2016-03-08 15:19:04.743131 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] enter Started/Primary/Active/Recovered 2016-03-08 15:19:04.743135 7efd3a0a0700 20 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] hit_set_clear 2016-03-08 15:19:04.743136 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] needs_recovery is recovered 2016-03-08 15:19:04.743139 7efd3a0a0700 20 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] agent_stop 2016-03-08 15:19:04.743143 7efd3a0a0700 10 log is not dirty 2016-03-08 15:19:04.743142 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] exit Started/Primary/Active/Recovered 0.000010 0 0.000000 2016-03-08 15:19:04.743145 7efd3a0a0700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] handle_peering_event: epoch_sent: 7 epoch_requested: 7 FlushedEvt 2016-03-08 15:19:04.743147 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] enter Started/Primary/Active/Clean 2016-03-08 15:19:04.743151 7efd3a0a0700 15 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] requeue_ops 2016-03-08 15:19:04.743156 7efd3a0a0700 10 log is not dirty 2016-03-08 15:19:04.743153 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] finish_recovery 2016-03-08 15:19:04.743157 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] clear_recovery_state 2016-03-08 15:19:04.743161 7efd3a0a0700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] handle_peering_event: epoch_sent: 7 epoch_requested: 7 AllReplicasRecovered 2016-03-08 15:19:04.743165 7efd3a0a0700 
5 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] exit Started/Primary/Active/Activating 0.016967 4 0.000136 2016-03-08 15:19:04.743166 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] trim_past_intervals: trimming interval(3-5 up [](-1) acting [](-1)) 2016-03-08 15:19:04.743171 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 undersized+degraded+peered] share_pg_info 2016-03-08 15:19:04.743170 7efd3a0a0700 5 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] enter Started/Primary/Active/Recovered 2016-03-08 15:19:04.743175 7efd3a8a1700 15 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 undersized+degraded+peered] publish_stats_to_osd 7:4 2016-03-08 15:19:04.743176 7efd3a0a0700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] needs_recovery is recovered 2016-03-08 15:19:04.743181 7efd3a0a0700 5 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] exit Started/Primary/Active/Recovered 0.000010 0 0.000000 2016-03-08 15:19:04.743186 7efd3a0a0700 5 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] enter Started/Primary/Active/Clean 2016-03-08 15:19:04.743191 7efd3a8a1700 10 log is not dirty 2016-03-08 15:19:04.743190 7efd3a0a0700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] finish_recovery 2016-03-08 15:19:04.743193 7efd3a0a0700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] clear_recovery_state 2016-03-08 15:19:04.743198 7efd3a0a0700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] trim_past_intervals: trimming interval(3-5 up [](-1) acting [](-1)) 2016-03-08 15:19:04.743202 7efd3a0a0700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 undersized+degraded+peered] share_pg_info 2016-03-08 15:19:04.743206 7efd3a0a0700 15 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 undersized+degraded+peered] publish_stats_to_osd 7:4 2016-03-08 15:19:04.743221 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 0/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 creating+activating+undersized+degraded] handle_peering_event: epoch_sent: 7 epoch_requested: 7 AllReplicasActivated 2016-03-08 15:19:04.743228 7efd3a0a0700 10 log is not dirty 2016-03-08 15:19:04.743227 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] share_pg_info 2016-03-08 15:19:04.743231 7efd3a8a1700 15 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 
mlcod 0'0 undersized+degraded+peered] publish_stats_to_osd 7:3 2016-03-08 15:19:04.743235 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] check_local 2016-03-08 15:19:04.743238 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] needs_recovery is recovered 2016-03-08 15:19:04.743241 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] needs_backfill does not need backfill 2016-03-08 15:19:04.743245 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] activate all replicas clean, no recovery 2016-03-08 15:19:04.743250 7efd3a8a1700 15 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] publish_stats_to_osd 7: no change since 2016-03-08 15:19:04.743230 2016-03-08 15:19:04.743255 7efd3a8a1700 20 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] hit_set_clear 2016-03-08 15:19:04.743259 7efd3a8a1700 20 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] agent_stop 2016-03-08 15:19:04.743263 7efd3a8a1700 10 log is not dirty 2016-03-08 15:19:04.743264 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] handle_peering_event: epoch_sent: 7 epoch_requested: 7 FlushedEvt 2016-03-08 15:19:04.743267 7efd3a8a1700 15 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] requeue_ops 2016-03-08 15:19:04.743271 7efd3a8a1700 10 log is not dirty 2016-03-08 15:19:04.743286 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] handle_peering_event: epoch_sent: 7 epoch_requested: 7 AllReplicasRecovered 2016-03-08 15:19:04.743290 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] exit Started/Primary/Active/Activating 0.017398 4 0.000124 2016-03-08 15:19:04.743299 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] enter Started/Primary/Active/Recovered 2016-03-08 15:19:04.743304 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] needs_recovery is recovered 2016-03-08 15:19:04.743308 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] exit Started/Primary/Active/Recovered 0.000013 0 0.000000 2016-03-08 15:19:04.743312 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] enter Started/Primary/Active/Clean 
2016-03-08 15:19:04.743316 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] finish_recovery 2016-03-08 15:19:04.743319 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] clear_recovery_state 2016-03-08 15:19:04.743323 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] trim_past_intervals: trimming interval(3-5 up [](-1) acting [](-1)) 2016-03-08 15:19:04.743328 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 undersized+degraded+peered] share_pg_info 2016-03-08 15:19:04.743331 7efd3a8a1700 15 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 undersized+degraded+peered] publish_stats_to_osd 7:4 2016-03-08 15:19:04.743341 7efd3a8a1700 10 log is not dirty 2016-03-08 15:19:04.743356 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] handle_peering_event: epoch_sent: 7 epoch_requested: 7 AllReplicasRecovered 2016-03-08 15:19:04.743362 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] exit Started/Primary/Active/Activating 0.017312 4 0.000108 2016-03-08 15:19:04.743366 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] enter Started/Primary/Active/Recovered 2016-03-08 15:19:04.743371 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] needs_recovery is recovered 2016-03-08 15:19:04.743375 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] exit Started/Primary/Active/Recovered 0.000008 0 0.000000 2016-03-08 15:19:04.743380 7efd3a8a1700 5 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] enter Started/Primary/Active/Clean 2016-03-08 15:19:04.743384 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] finish_recovery 2016-03-08 15:19:04.743387 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/0/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] clear_recovery_state 2016-03-08 15:19:04.743392 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 pi=3-5/1 crt=0'0 mlcod 0'0 undersized+degraded+peered] trim_past_intervals: trimming interval(3-5 up [](-1) acting [](-1)) 2016-03-08 15:19:04.743396 7efd3a8a1700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 undersized+degraded+peered] share_pg_info 2016-03-08 15:19:04.743399 7efd3a8a1700 15 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 undersized+degraded+peered] 
publish_stats_to_osd 7:4 2016-03-08 15:19:04.743409 7efd3a8a1700 10 log is not dirty 2016-03-08 15:19:04.760005 7efd48a2c700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 undersized+degraded+peered] _finish_recovery 2016-03-08 15:19:04.760031 7efd48a2c700 10 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 undersized+degraded+peered] purge_strays 2016-03-08 15:19:04.760041 7efd48a2c700 15 osd.0 pg_epoch: 7 pg[1.3( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 undersized+degraded+peered] publish_stats_to_osd 7: no change since 2016-03-08 15:19:04.743175 2016-03-08 15:19:04.767120 7efd48a2c700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 undersized+degraded+peered] _finish_recovery 2016-03-08 15:19:04.767134 7efd48a2c700 10 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 undersized+degraded+peered] purge_strays 2016-03-08 15:19:04.767141 7efd48a2c700 15 osd.0 pg_epoch: 7 pg[1.0( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 undersized+degraded+peered] publish_stats_to_osd 7: no change since 2016-03-08 15:19:04.743205 2016-03-08 15:19:04.767150 7efd48a2c700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 undersized+degraded+peered] _finish_recovery 2016-03-08 15:19:04.767155 7efd48a2c700 10 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 undersized+degraded+peered] purge_strays 2016-03-08 15:19:04.767161 7efd48a2c700 15 osd.0 pg_epoch: 7 pg[1.2( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 undersized+degraded+peered] publish_stats_to_osd 7: no change since 2016-03-08 15:19:04.743331 2016-03-08 15:19:04.767168 7efd48a2c700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 undersized+degraded+peered] _finish_recovery 2016-03-08 15:19:04.767174 7efd48a2c700 10 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 undersized+degraded+peered] purge_strays 2016-03-08 15:19:04.767181 7efd48a2c700 15 osd.0 pg_epoch: 7 pg[1.1( empty local-les=3 n=0 ec=3 les/c/f 3/7/0 6/6/6) [0] r=0 lpr=6 crt=0'0 mlcod 0'0 undersized+degraded+peered] publish_stats_to_osd 7: no change since 2016-03-08 15:19:04.743399 ../qa/workunits/ceph-helpers.sh:1267: display_logs: read file ../qa/workunits/ceph-helpers.sh:1326: main: code=1 ../qa/workunits/ceph-helpers.sh:1328: main: teardown testdir/osd-crush ../qa/workunits/ceph-helpers.sh:117: teardown: local dir=testdir/osd-crush ../qa/workunits/ceph-helpers.sh:118: teardown: kill_daemons testdir/osd-crush KILL .../qa/workunits/ceph-helpers.sh:195: kill_daemons: shopt -q -o xtrace .../qa/workunits/ceph-helpers.sh:195: kill_daemons: echo true ../qa/workunits/ceph-helpers.sh:195: kill_daemons: local trace=true ../qa/workunits/ceph-helpers.sh:196: kill_daemons: true ../qa/workunits/ceph-helpers.sh:196: kill_daemons: shopt -u -o xtrace ../qa/workunits/ceph-helpers.sh:222: kill_daemons: return 0 .../qa/workunits/ceph-helpers.sh:119: teardown: stat -f -c %T . 
../qa/workunits/ceph-helpers.sh:119: teardown: '[' btrfs == btrfs ']' ../qa/workunits/ceph-helpers.sh:120: teardown: __teardown_btrfs testdir/osd-crush ../qa/workunits/ceph-helpers.sh:126: __teardown_btrfs: local btrfs_base_dir=testdir/osd-crush .../qa/workunits/ceph-helpers.sh:128: __teardown_btrfs: ls -l testdir/osd-crush .../qa/workunits/ceph-helpers.sh:128: __teardown_btrfs: egrep '^d' .../qa/workunits/ceph-helpers.sh:128: __teardown_btrfs: awk '{print $9}' ../qa/workunits/ceph-helpers.sh:128: __teardown_btrfs: btrfs_dirs='0 a tmp' .../qa/workunits/ceph-helpers.sh:129: __teardown_btrfs: pwd ../qa/workunits/ceph-helpers.sh:129: __teardown_btrfs: current_path=/ceph/src .../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: cd testdir/osd-crush .../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: btrfs subvolume list . -t .../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: egrep '^[0-9]' .../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: awk '{print $4}' .../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: grep testdir/osd-crush/ ../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: for subvolume in '$(cd $btrfs_base_dir; btrfs subvolume list . -t |egrep '\''^[0-9]'\'' | awk '\''{print $4}'\'' |grep "$btrfs_base_dir/$btrfs_dir")' .../qa/workunits/ceph-helpers.sh:134: __teardown_btrfs: echo erwan/chroot/ceph/src/testdir/osd-crush/0/snap_1 .../qa/workunits/ceph-helpers.sh:134: __teardown_btrfs: sed -e 's|.*/ceph/src/||g' ../qa/workunits/ceph-helpers.sh:134: __teardown_btrfs: local_subvolume=testdir/osd-crush/0/snap_1 ../qa/workunits/ceph-helpers.sh:135: __teardown_btrfs: btrfs subvolume delete testdir/osd-crush/0/snap_1 Delete subvolume (no-commit): '/ceph/src/testdir/osd-crush/0/snap_1' ../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: for subvolume in '$(cd $btrfs_base_dir; btrfs subvolume list . -t |egrep '\''^[0-9]'\'' | awk '\''{print $4}'\'' |grep "$btrfs_base_dir/$btrfs_dir")' .../qa/workunits/ceph-helpers.sh:134: __teardown_btrfs: echo erwan/chroot/ceph/src/testdir/osd-crush/0/snap_2 .../qa/workunits/ceph-helpers.sh:134: __teardown_btrfs: sed -e 's|.*/ceph/src/||g' ../qa/workunits/ceph-helpers.sh:134: __teardown_btrfs: local_subvolume=testdir/osd-crush/0/snap_2 ../qa/workunits/ceph-helpers.sh:135: __teardown_btrfs: btrfs subvolume delete testdir/osd-crush/0/snap_2 Delete subvolume (no-commit): '/ceph/src/testdir/osd-crush/0/snap_2' ../qa/workunits/ceph-helpers.sh:131: __teardown_btrfs: for subvolume in '$(cd $btrfs_base_dir; btrfs subvolume list . -t |egrep '\''^[0-9]'\'' | awk '\''{print $4}'\'' |grep "$btrfs_base_dir/$btrfs_dir")' .../qa/workunits/ceph-helpers.sh:134: __teardown_btrfs: echo erwan/chroot/ceph/src/testdir/osd-crush/0/current .../qa/workunits/ceph-helpers.sh:134: __teardown_btrfs: sed -e 's|.*/ceph/src/||g' ../qa/workunits/ceph-helpers.sh:134: __teardown_btrfs: local_subvolume=testdir/osd-crush/0/current ../qa/workunits/ceph-helpers.sh:135: __teardown_btrfs: btrfs subvolume delete testdir/osd-crush/0/current Delete subvolume (no-commit): '/ceph/src/testdir/osd-crush/0/current' ../qa/workunits/ceph-helpers.sh:122: teardown: rm -fr testdir/osd-crush ../qa/workunits/ceph-helpers.sh:1329: main: return 1 FAIL test/mon/osd-crush.sh (exit status: 1)