test.log

Kefu Chai, 07/19/2016 01:16 AM

142/142 Test #104: rados-striper.sh ........................***Failed  300.67 sec
Enivronment Variables Already Set
+ PS4='${BASH_SOURCE[0]}:$LINENO: ${FUNCNAME[0]}:  '
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1447: main:  export PATH=/tmp/ceph.Ry3/ceph-disk-virtualenv/bin:/tmp/ceph.Ry3/ceph-detect-init-virtualenv/bin:.:/home/jenkins-build/build/workspace/ceph-pull-requests/build/bin:/home/jenkins-build/build/workspace/ceph-pull-requests/src:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1447: main:  PATH=/tmp/ceph.Ry3/ceph-disk-virtualenv/bin:/tmp/ceph.Ry3/ceph-detect-init-virtualenv/bin:.:/home/jenkins-build/build/workspace/ceph-pull-requests/build/bin:/home/jenkins-build/build/workspace/ceph-pull-requests/src:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1450: main:  export CEPH_CONF=/dev/null
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1450: main:  CEPH_CONF=/dev/null
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1451: main:  unset CEPH_ARGS
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1453: main:  local code
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1454: main:  run testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:21: run:  local dir=testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:22: run:  shift
/home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:24: run:  export CEPH_MON=127.0.0.1:7116
/home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:24: run:  CEPH_MON=127.0.0.1:7116
/home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:25: run:  export CEPH_ARGS
//home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:26: run:  uuidgen
/home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:26: run:  CEPH_ARGS+='--fsid=414e5204-fcfb-4176-ab89-aa7c684bd782 --auth-supported=none '
/home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:27: run:  CEPH_ARGS+='--mon-host=127.0.0.1:7116 '
/home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:30: run:  setup testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:93: setup:  local dir=testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:94: setup:  teardown testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:118: teardown:  local dir=testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:119: teardown:  kill_daemons testdir/rados-striper KILL
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:252: kill_daemons:  shopt -q -o xtrace
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:252: kill_daemons:  echo true
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:252: kill_daemons:  local trace=true
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:253: kill_daemons:  true
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:253: kill_daemons:  shopt -u -o xtrace
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:269: kill_daemons:  return 0
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:120: teardown:  stat -f -c %T .
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:120: teardown:  '[' xfs == btrfs ']'
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:123: teardown:  rm -fr testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:95: setup:  mkdir -p testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:33: run:  run_mon testdir/rados-striper a
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:340: run_mon:  local dir=testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:341: run_mon:  shift
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:342: run_mon:  local id=a
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:343: run_mon:  shift
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:344: run_mon:  local data=testdir/rados-striper/a
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:347: run_mon:  ceph-mon --id a --mkfs --mon-data=testdir/rados-striper/a --run-dir=testdir/rados-striper
ceph-mon: mon.noname-a 127.0.0.1:7116/0 is local, renaming to mon.a
ceph-mon: set fsid to 414e5204-fcfb-4176-ab89-aa7c684bd782
ceph-mon: created monfs at testdir/rados-striper/a for mon.a
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:354: run_mon:  ceph-mon --id a --mon-osd-full-ratio=.99 --mon-data-avail-crit=1 --paxos-propose-interval=0.1 --osd-crush-chooseleaf-type=0 --erasure-code-dir=/home/jenkins-build/build/workspace/ceph-pull-requests/build/lib --plugin-dir=/home/jenkins-build/build/workspace/ceph-pull-requests/build/lib --debug-mon 20 --debug-ms 20 --debug-paxos 20 --chdir= --mon-data=testdir/rados-striper/a '--log-file=testdir/rados-striper/$name.log' '--admin-socket=testdir/rados-striper/$cluster-$name.asok' --mon-cluster-log-file=testdir/rados-striper/log --run-dir=testdir/rados-striper '--pid-file=testdir/rados-striper/$name.pid'
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:373: run_mon:  cat
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:373: run_mon:  get_config mon a fsid
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:754: get_config:  local daemon=mon
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:755: get_config:  local id=a
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:756: get_config:  local config=fsid
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:758: get_config:  CEPH_ARGS=
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:758: get_config:  ceph --format xml daemon testdir/rados-striper/ceph-mon.a.asok config get fsid
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:761: get_config:  xmlstarlet sel -t -m //fsid -v . -n
-:1.1: Start tag expected, '<' not found
admin_socket: exception getting command descriptions: [Errno 111] Connection ref
^
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:373: run_mon:  get_config mon a mon_host
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:754: get_config:  local daemon=mon
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:755: get_config:  local id=a
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:756: get_config:  local config=mon_host
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:761: get_config:  xmlstarlet sel -t -m //mon_host -v . -n
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:758: get_config:  CEPH_ARGS=
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:758: get_config:  ceph --format xml daemon testdir/rados-striper/ceph-mon.a.asok config get mon_host
-:1.1: Start tag expected, '<' not found
admin_socket: exception getting command descriptions: [Errno 111] Connection ref
^
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:378: run_mon:  get_config mon a mon_initial_members
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:754: get_config:  local daemon=mon
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:755: get_config:  local id=a
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:756: get_config:  local config=mon_initial_members
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:758: get_config:  CEPH_ARGS=
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:758: get_config:  ceph --format xml daemon testdir/rados-striper/ceph-mon.a.asok config get mon_initial_members
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:761: get_config:  xmlstarlet sel -t -m //mon_initial_members -v . -n
-:1.1: Start tag expected, '<' not found
admin_socket: exception getting command descriptions: [Errno 111] Connection ref
^
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:378: run_mon:  test -z ''
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:379: run_mon:  ceph osd pool delete rbd rbd --yes-i-really-really-mean-it
2016-07-18 19:48:50.558700 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa51805ab30 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa51805bde0).fault
2016-07-18 19:48:53.571260 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c000c80 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c001f30).fault
2016-07-18 19:48:56.564706 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c004fe0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c006240).fault
2016-07-18 19:48:59.560102 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c003350 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c002350).fault
2016-07-18 19:49:02.560287 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0089e0).fault
2016-07-18 19:49:05.566868 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c003350 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c000c80).fault
2016-07-18 19:49:08.560964 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c001910).fault
2016-07-18 19:49:11.560969 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c008af0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0014d0).fault
2016-07-18 19:49:14.567297 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00a7a0).fault
2016-07-18 19:49:17.563401 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c008af0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c009d50).fault
2016-07-18 19:49:20.568222 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0040e0).fault
2016-07-18 19:49:23.562304 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00abf0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00be50).fault
2016-07-18 19:49:26.563315 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0065b0).fault
2016-07-18 19:49:29.562744 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00abf0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c007100).fault
2016-07-18 19:49:32.562918 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0050c0).fault
2016-07-18 19:49:35.563443 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00abf0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c005d60).fault
2016-07-18 19:49:38.563680 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00d4a0).fault
2016-07-18 19:49:41.564038 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00abf0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00e080).fault
2016-07-18 19:49:44.564478 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00e4b0).fault
2016-07-18 19:49:47.565892 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00abf0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00efb0).fault
2016-07-18 19:49:50.565001 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00e670).fault
2016-07-18 19:49:53.565318 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00abf0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c009210).fault
2016-07-18 19:49:56.565680 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c010780).fault
2016-07-18 19:49:59.566048 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00abf0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00c530).fault
2016-07-18 19:50:02.566947 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00bf40).fault
2016-07-18 19:50:05.567101 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:50:08.567870 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00f830).fault
2016-07-18 19:50:11.568080 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:50:14.568619 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0074a0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0008c0).fault
2016-07-18 19:50:17.568944 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:50:20.569188 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c004540).fault
2016-07-18 19:50:23.569495 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:50:26.571615 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c002e60).fault
2016-07-18 19:50:29.572054 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:50:32.572563 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0032b0).fault
2016-07-18 19:50:35.572764 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:50:38.573088 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c009db0).fault
2016-07-18 19:50:41.573692 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:50:44.574248 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c008b70).fault
2016-07-18 19:50:47.577314 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:50:50.574486 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c006030).fault
2016-07-18 19:50:53.574253 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:50:56.574525 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c006360).fault
2016-07-18 19:50:59.575699 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:51:02.576571 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00e160).fault
2016-07-18 19:51:05.576729 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:51:08.577320 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00ee10).fault
2016-07-18 19:51:11.577895 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010980 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011be0).fault
2016-07-18 19:51:14.578784 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00eb60).fault
2016-07-18 19:51:17.578962 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c010850 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00e750).fault
2016-07-18 19:51:20.579356 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c009ac0).fault
2016-07-18 19:51:23.580555 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:51:26.580913 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00cd60).fault
2016-07-18 19:51:29.581275 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:51:32.581522 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007400 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011fc0).fault
2016-07-18 19:51:35.581946 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:51:38.582208 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c007310 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c008570).fault
2016-07-18 19:51:41.582543 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:51:44.582875 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006750 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00fc50).fault
2016-07-18 19:51:47.583148 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:51:50.583721 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006750 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0019f0).fault
2016-07-18 19:51:53.584144 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:51:56.584416 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006750 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c009f00).fault
2016-07-18 19:51:59.584778 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:52:02.585128 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006750 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c002a60).fault
2016-07-18 19:52:05.585373 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:52:08.585905 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006690 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c003a70).fault
2016-07-18 19:52:11.586231 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:52:14.586622 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c007770).fault
2016-07-18 19:52:17.587127 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:52:20.587470 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00d4f0).fault
2016-07-18 19:52:23.587908 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:52:26.589064 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c005220).fault
2016-07-18 19:52:29.588731 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:52:32.589110 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c005220).fault
2016-07-18 19:52:35.589533 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:52:38.589956 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0097b0).fault
2016-07-18 19:52:41.590413 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0106e0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011940).fault
2016-07-18 19:52:44.590786 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c009410).fault
2016-07-18 19:52:47.591213 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00ab40 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011e80).fault
2016-07-18 19:52:50.591572 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00c300).fault
2016-07-18 19:52:53.592074 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00ab40 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0134c0).fault
2016-07-18 19:52:56.592364 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c0117b0).fault
2016-07-18 19:52:59.592687 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00ab40 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011f90).fault
2016-07-18 19:53:02.593110 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00d410).fault
2016-07-18 19:53:05.593463 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00ab40 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011d40).fault
2016-07-18 19:53:08.593904 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00d410).fault
2016-07-18 19:53:11.594340 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00aa60 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c011d40).fault
2016-07-18 19:53:14.594742 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00d410).fault
2016-07-18 19:53:17.595148 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00a860 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00a5f0).fault
2016-07-18 19:53:20.595455 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00d410).fault
2016-07-18 19:53:23.595955 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00a5f0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00fa10).fault
2016-07-18 19:53:26.596358 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c006510 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c003630).fault
2016-07-18 19:53:29.596796 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00a3e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c003fd0).fault
2016-07-18 19:53:32.597169 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0063e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c00bd80).fault
2016-07-18 19:53:35.597676 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00a3e0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c003fd0).fault
2016-07-18 19:53:38.598096 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0063e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c002250).fault
2016-07-18 19:53:41.598617 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00a3e0 sd=4 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c003fd0).fault
2016-07-18 19:53:44.599174 7fa51c16d700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c0062c0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c007520).fault
2016-07-18 19:53:47.599627 7fa516ffd700  0 -- :/2375284273 >> 127.0.0.1:7116/0 pipe(0x7fa50c00a3e0 sd=5 :0 s=1 pgs=0 cs=0 l=1 c=0x7fa50c005450).fault
2016-07-18 19:53:50.558628 7fa5200ee700  0 monclient(hunting): authenticate timed out after 300
2016-07-18 19:53:50.558717 7fa5200ee700  0 librados: client.admin authentication error (110) Connection timed out
Error connecting to cluster: TimedOut
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:379: run_mon:  return 1
/home/jenkins-build/build/workspace/ceph-pull-requests/src/test/libradosstriper/rados-striper.sh:33: run:  return 1
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1457: main:  display_logs testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1318: display_logs:  local dir=testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1320: display_logs:  find testdir/rados-striper -maxdepth 1 -name '*.log'
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1321: display_logs:  read file
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1322: display_logs:  echo '======================= testdir/rados-striper/mon.a.log'
======================= testdir/rados-striper/mon.a.log
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1323: display_logs:  cat testdir/rados-striper/mon.a.log
2016-07-18 19:48:50.192869 7f0c083ea4c0  0 ceph version v11.0.0-674-g8aae974 (8aae9740fad1909e8d010f899e8ed88f672e1859), process ceph-mon, pid 22296
2016-07-18 19:48:50.223847 7f0c083ea4c0 10 load: jerasure load: lrc load: isa 
2016-07-18 19:48:50.224092 7f0c083ea4c0  1 leveldb: Recovering log #3
2016-07-18 19:48:50.224123 7f0c083ea4c0  1 leveldb: Level-0 table #5: started
2016-07-18 19:48:50.228717 7f0c083ea4c0  1 leveldb: Level-0 table #5: 559 bytes OK
2016-07-18 19:48:50.230047 7f0c083ea4c0  1 leveldb: Delete type=0 #3

    
2016-07-18 19:48:50.230077 7f0c083ea4c0  1 leveldb: Delete type=3 #2

    
2016-07-18 19:48:50.230179 7f0c083ea4c0 10 obtain_monmap
2016-07-18 19:48:50.230195 7f0c083ea4c0 10 obtain_monmap found mkfs monmap
2016-07-18 19:48:50.230249 7f0c083ea4c0  0 starting mon.a rank 0 at 127.0.0.1:7116/0 mon_data testdir/rados-striper/a fsid 414e5204-fcfb-4176-ab89-aa7c684bd782
2016-07-18 19:48:50.230271 7f0c083ea4c0 10 -- :/0 rank.bind 127.0.0.1:7116/0
2016-07-18 19:48:50.230278 7f0c083ea4c0 10 accepter.accepter.bind
2016-07-18 19:48:50.230308 7f0c083ea4c0 10 accepter.accepter.bind bound to 127.0.0.1:7116/0
2016-07-18 19:48:50.230318 7f0c083ea4c0  1 -- 127.0.0.1:7116/0 learned my addr 127.0.0.1:7116/0
2016-07-18 19:48:50.230324 7f0c083ea4c0  1 accepter.accepter.bind my_inst.addr is 127.0.0.1:7116/0 need_addr=0
2016-07-18 19:48:50.230497 7f0c083ea4c0  1 mon.a@-1(probing) e0 preinit fsid 414e5204-fcfb-4176-ab89-aa7c684bd782
2016-07-18 19:48:50.230522 7f0c083ea4c0 10 mon.a@-1(probing) e0 check_fsid cluster_uuid contains '414e5204-fcfb-4176-ab89-aa7c684bd782'
2016-07-18 19:48:50.230534 7f0c083ea4c0 10 mon.a@-1(probing) e0 features compat={},rocompat={},incompat={1=initial feature set (~v.18),3=single paxos with k/v store (v0.?)}
2016-07-18 19:48:50.230541 7f0c083ea4c0 10 mon.a@-1(probing) e0 apply_compatset_features_to_quorum_requirements required_features 0
2016-07-18 19:48:50.230544 7f0c083ea4c0 10 mon.a@-1(probing) e0 required_features 0
2016-07-18 19:48:50.230551 7f0c083ea4c0 10 mon.a@-1(probing) e0 has_ever_joined = 0
2016-07-18 19:48:50.230559 7f0c083ea4c0 10 mon.a@-1(probing) e0 sync_last_committed_floor 0
2016-07-18 19:48:50.230564 7f0c083ea4c0 10 mon.a@-1(probing) e0 init_paxos
2016-07-18 19:48:50.230571 7f0c083ea4c0 10 mon.a@-1(probing).paxos(paxos recovering c 0..0) init last_pn: 0 accepted_pn: 0 last_committed: 0 first_committed: 0
2016-07-18 19:48:50.230577 7f0c083ea4c0 10 mon.a@-1(probing).paxos(paxos recovering c 0..0) init
2016-07-18 19:48:50.230591 7f0c083ea4c0  1 mon.a@-1(probing).mds e0 Unable to load 'last_metadata'
2016-07-18 19:48:50.230624 7f0c083ea4c0 10 mon.a@-1(probing) e0 refresh_from_paxos
2016-07-18 19:48:50.230632 7f0c083ea4c0 10 mon.a@-1(probing) e0 refresh_from_paxos no cluster_fingerprint
2016-07-18 19:48:50.230641 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(pgmap 0..0) refresh
2016-07-18 19:48:50.230652 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(mdsmap 0..0) refresh
2016-07-18 19:48:50.230659 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(osdmap 0..0) refresh
2016-07-18 19:48:50.230667 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(logm 0..0) refresh
2016-07-18 19:48:50.230671 7f0c083ea4c0 10 mon.a@-1(probing).log v0 update_from_paxos
2016-07-18 19:48:50.230673 7f0c083ea4c0 10 mon.a@-1(probing).log v0 update_from_paxos version 0 summary v 0
2016-07-18 19:48:50.230681 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(monmap 0..0) refresh
2016-07-18 19:48:50.230689 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(auth 0..0) refresh
2016-07-18 19:48:50.230693 7f0c083ea4c0 10 mon.a@-1(probing).auth v0 update_from_paxos
2016-07-18 19:48:50.230698 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(pgmap 0..0) post_refresh
2016-07-18 19:48:50.230700 7f0c083ea4c0 10 mon.a@-1(probing).pg v0 post_paxos_update
2016-07-18 19:48:50.230706 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(mdsmap 0..0) post_refresh
2016-07-18 19:48:50.230707 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(osdmap 0..0) post_refresh
2016-07-18 19:48:50.230708 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(logm 0..0) post_refresh
2016-07-18 19:48:50.230709 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(monmap 0..0) post_refresh
2016-07-18 19:48:50.230710 7f0c083ea4c0 10 mon.a@-1(probing).paxosservice(auth 0..0) post_refresh
2016-07-18 19:48:50.230712 7f0c083ea4c0 10 mon.a@-1(probing).health(0) init
2016-07-18 19:48:50.230780 7f0c083ea4c0  1 -- 127.0.0.1:7116/0 messenger.start
2016-07-18 19:48:50.230805 7f0c083ea4c0  2 mon.a@-1(probing) e0 init
2016-07-18 19:48:50.230825 7f0c083ea4c0 10 -- 127.0.0.1:7116/0 ready 127.0.0.1:7116/0
2016-07-18 19:48:50.230850 7f0c083ea4c0  1 accepter.accepter.start
2016-07-18 19:48:50.230862 7f0c083ea4c0 10 mon.a@-1(probing) e0 bootstrap
2016-07-18 19:48:50.230865 7f0c083ea4c0 10 mon.a@-1(probing) e0 sync_reset_requester
2016-07-18 19:48:50.230866 7f0c083ea4c0 10 mon.a@-1(probing) e0 unregister_cluster_logger - not registered
2016-07-18 19:48:50.230867 7f0c083ea4c0 10 mon.a@-1(probing) e0 cancel_probe_timeout (none scheduled)
2016-07-18 19:48:50.230867 7f0c083ea4c0  0 mon.a@-1(probing) e0  my rank is now 0 (was -1)
2016-07-18 19:48:50.230869 7f0c083ea4c0  1 -- 127.0.0.1:7116/0 mark_down_all
2016-07-18 19:48:50.230872 7f0c083ea4c0 10 mon.a@0(probing) e0 _reset
2016-07-18 19:48:50.230872 7f0c083ea4c0 10 mon.a@0(probing) e0 cancel_probe_timeout (none scheduled)
2016-07-18 19:48:50.230873 7f0c083ea4c0 10 mon.a@0(probing) e0 timecheck_finish
2016-07-18 19:48:50.230876 7f0c083ea4c0 15 mon.a@0(probing) e0 health_tick_stop
2016-07-18 19:48:50.230877 7f0c083ea4c0 15 mon.a@0(probing) e0 health_interval_stop
2016-07-18 19:48:50.230877 7f0c083ea4c0 10 mon.a@0(probing) e0 scrub_event_cancel
2016-07-18 19:48:50.230879 7f0c083ea4c0 10 mon.a@0(probing) e0 scrub_reset
2016-07-18 19:48:50.230880 7f0c083ea4c0 10 mon.a@0(probing).paxos(paxos recovering c 0..0) restart -- canceling timeouts
2016-07-18 19:48:50.230884 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(pgmap 0..0) restart
2016-07-18 19:48:50.230894 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(mdsmap 0..0) restart
2016-07-18 19:48:50.230895 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(osdmap 0..0) restart
2016-07-18 19:48:50.230896 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(logm 0..0) restart
2016-07-18 19:48:50.230897 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(monmap 0..0) restart
2016-07-18 19:48:50.230897 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(auth 0..0) restart
2016-07-18 19:48:50.230900 7f0c083ea4c0  1 mon.a@0(probing) e0 win_standalone_election
2016-07-18 19:48:50.230909 7f0c083ea4c0  1 mon.a@0(probing).elector(1) init, last seen epoch 1
2016-07-18 19:48:50.230911 7f0c083ea4c0 10 mon.a@0(probing).elector(1) bump_epoch 1 to 2
2016-07-18 19:48:50.231498 7f0c083ea4c0 10 mon.a@0(probing) e0 join_election
2016-07-18 19:48:50.231502 7f0c083ea4c0 10 mon.a@0(probing) e0 _reset
2016-07-18 19:48:50.231502 7f0c083ea4c0 10 mon.a@0(probing) e0 cancel_probe_timeout (none scheduled)
2016-07-18 19:48:50.231503 7f0c083ea4c0 10 mon.a@0(probing) e0 timecheck_finish
2016-07-18 19:48:50.231504 7f0c083ea4c0 15 mon.a@0(probing) e0 health_tick_stop
2016-07-18 19:48:50.231505 7f0c083ea4c0 15 mon.a@0(probing) e0 health_interval_stop
2016-07-18 19:48:50.231505 7f0c083ea4c0 10 mon.a@0(probing) e0 scrub_event_cancel
2016-07-18 19:48:50.231506 7f0c083ea4c0 10 mon.a@0(probing) e0 scrub_reset
2016-07-18 19:48:50.231507 7f0c083ea4c0 10 mon.a@0(probing).paxos(paxos recovering c 0..0) restart -- canceling timeouts
2016-07-18 19:48:50.231510 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(pgmap 0..0) restart
2016-07-18 19:48:50.231511 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(mdsmap 0..0) restart
2016-07-18 19:48:50.231512 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(osdmap 0..0) restart
2016-07-18 19:48:50.231512 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(logm 0..0) restart
2016-07-18 19:48:50.231513 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(monmap 0..0) restart
2016-07-18 19:48:50.231514 7f0c083ea4c0 10 mon.a@0(probing).paxosservice(auth 0..0) restart
2016-07-18 19:48:50.231516 7f0c083ea4c0 10 mon.a@0(electing) e0 win_election epoch 2 quorum 0 features 576460752032890879
2016-07-18 19:48:50.231539 7f0c083ea4c0  0 log_channel(cluster) log [INF] : mon.a@0 won leader election with quorum 0
2016-07-18 19:48:50.231554 7f0c083ea4c0  1 -- 127.0.0.1:7116/0 --> 127.0.0.1:7116/0 -- log(1 entries from seq 1 at 2016-07-18 19:48:50.231541) v1 -- ?+0 0xb28cb40 con 0xb1c8800
2016-07-18 19:48:50.231561 7f0c083ea4c0 20 -- 127.0.0.1:7116/0 submit_message log(1 entries from seq 1 at 2016-07-18 19:48:50.231541) v1 local
2016-07-18 19:48:50.231567 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(monmap 0..0) election_finished
2016-07-18 19:48:50.231569 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(monmap 0..0) _active
2016-07-18 19:48:50.231570 7f0c083ea4c0  7 mon.a@0(leader).paxosservice(monmap 0..0) _active creating new pending
2016-07-18 19:48:50.231580 7f0c083ea4c0 10 mon.a@0(leader).monmap v0 create_pending monmap epoch 1
2016-07-18 19:48:50.231582 7f0c083ea4c0 10 mon.a@0(leader).monmap v0 create_initial using current monmap
2016-07-18 19:48:50.231583 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(monmap 0..0) propose_pending
2016-07-18 19:48:50.231603 7f0c083ea4c0 10 mon.a@0(leader).monmap v0 encode_pending epoch 1
2016-07-18 19:48:50.231637 7f0c083ea4c0 10 mon.a@0(leader) e0 prepare_new_fingerprint proposing cluster_fingerprint 087dfb0a-97f9-4757-a426-8f2e6120e1c0
2016-07-18 19:48:50.231642 7f0c083ea4c0  5 mon.a@0(leader).paxos(paxos active c 0..0) queue_pending_finisher 0xb18c180
2016-07-18 19:48:50.231646 7f0c083ea4c0 10 mon.a@0(leader).paxos(paxos active c 0..0) trigger_propose active, proposing now
2016-07-18 19:48:50.231659 7f0c083ea4c0 10 mon.a@0(leader).paxos(paxos active c 0..0) propose_pending 1 411 bytes
2016-07-18 19:48:50.231663 7f0c083ea4c0 10 mon.a@0(leader).paxos(paxos updating c 0..0) begin for 1 411 bytes
2016-07-18 19:48:50.232058 7f0c083ea4c0 10 mon.a@0(leader).paxos(paxos updating c 0..0) commit_start 1
2016-07-18 19:48:50.232072 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(pgmap 0..0) election_finished
2016-07-18 19:48:50.232073 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(pgmap 0..0) _active
2016-07-18 19:48:50.232074 7f0c083ea4c0  7 mon.a@0(leader).paxosservice(pgmap 0..0) _active creating new pending
2016-07-18 19:48:50.232079 7f0c083ea4c0 10 mon.a@0(leader).pg v0 create_pending v 1
2016-07-18 19:48:50.232089 7f0c083ea4c0 10 mon.a@0(leader).pg v0 create_initial -- creating initial map
2016-07-18 19:48:50.232098 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(pgmap 0..0) propose_pending
2016-07-18 19:48:50.232102 7f0c083ea4c0 10 mon.a@0(leader).pg v0 encode_pending v 1
2016-07-18 19:48:50.232118 7f0c083ea4c0  5 mon.a@0(leader).paxos(paxos writing c 0..0) queue_pending_finisher 0xb18c160
2016-07-18 19:48:50.232121 7f0c083ea4c0 10 mon.a@0(leader).paxos(paxos writing c 0..0) trigger_propose not active, will propose later
2016-07-18 19:48:50.232122 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(mdsmap 0..0) election_finished
2016-07-18 19:48:50.232123 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(mdsmap 0..0) _active
2016-07-18 19:48:50.232124 7f0c083ea4c0  7 mon.a@0(leader).paxosservice(mdsmap 0..0) _active creating new pending
2016-07-18 19:48:50.232135 7f0c083ea4c0 10 mon.a@0(leader).mds e0 create_pending e1
2016-07-18 19:48:50.232137 7f0c083ea4c0 10 mon.a@0(leader).mds e0 create_initial
2016-07-18 19:48:50.232138 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(mdsmap 0..0) propose_pending
2016-07-18 19:48:50.232150 7f0c083ea4c0 10 mon.a@0(leader).mds e0 encode_pending e1
2016-07-18 19:48:50.232172 7f0c083ea4c0  5 mon.a@0(leader).paxos(paxos writing c 0..0) queue_pending_finisher 0xb18c240
2016-07-18 19:48:50.232175 7f0c083ea4c0 10 mon.a@0(leader).paxos(paxos writing c 0..0) trigger_propose not active, will propose later
2016-07-18 19:48:50.232177 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(osdmap 0..0) election_finished
2016-07-18 19:48:50.232178 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(osdmap 0..0) _active
2016-07-18 19:48:50.232178 7f0c083ea4c0  7 mon.a@0(leader).paxosservice(osdmap 0..0) _active creating new pending
2016-07-18 19:48:50.232184 7f0c083ea4c0 10 mon.a@0(leader).osd e0 create_pending e 1
2016-07-18 19:48:50.232207 7f0c083ea4c0 10 mon.a@0(leader).osd e0 create_pending  did clean_temps
2016-07-18 19:48:50.232212 7f0c083ea4c0 10 mon.a@0(leader).osd e0 create_initial for 414e5204-fcfb-4176-ab89-aa7c684bd782
2016-07-18 19:48:50.232420 7f0c083ea4c0 20 mon.a@0(leader).osd e0  full crc 3623640541
2016-07-18 19:48:50.232430 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(osdmap 0..0) propose_pending
2016-07-18 19:48:50.232433 7f0c083ea4c0 10 mon.a@0(leader).osd e0 encode_pending e 1
2016-07-18 19:48:50.232483 7f0c083ea4c0 20 mon.a@0(leader).osd e0  full_crc 3623640541 inc_crc 1456793561
2016-07-18 19:48:50.232499 7f0c083ea4c0  5 mon.a@0(leader).paxos(paxos writing c 0..0) queue_pending_finisher 0xb18c220
2016-07-18 19:48:50.232502 7f0c083ea4c0 10 mon.a@0(leader).paxos(paxos writing c 0..0) trigger_propose not active, will propose later
2016-07-18 19:48:50.232504 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(logm 0..0) election_finished
2016-07-18 19:48:50.232505 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(logm 0..0) _active
2016-07-18 19:48:50.232505 7f0c083ea4c0  7 mon.a@0(leader).paxosservice(logm 0..0) _active creating new pending
2016-07-18 19:48:50.232508 7f0c083ea4c0 10 mon.a@0(leader).log v0 create_pending v 1
2016-07-18 19:48:50.232509 7f0c083ea4c0 10 mon.a@0(leader).log v0 create_initial -- creating initial map
2016-07-18 19:48:50.232518 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(logm 0..0) propose_pending
2016-07-18 19:48:50.232521 7f0c083ea4c0 10 mon.a@0(leader).log v0 encode_full log v 0
2016-07-18 19:48:50.232526 7f0c083ea4c0 10 mon.a@0(leader).log v0 encode_pending v1
2016-07-18 19:48:50.232540 7f0c083ea4c0  5 mon.a@0(leader).paxos(paxos writing c 0..0) queue_pending_finisher 0xb18c210
2016-07-18 19:48:50.232544 7f0c083ea4c0 10 mon.a@0(leader).paxos(paxos writing c 0..0) trigger_propose not active, will propose later
2016-07-18 19:48:50.232545 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(auth 0..0) election_finished
2016-07-18 19:48:50.232546 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(auth 0..0) _active
2016-07-18 19:48:50.232547 7f0c083ea4c0  7 mon.a@0(leader).paxosservice(auth 0..0) _active creating new pending
2016-07-18 19:48:50.232548 7f0c083ea4c0 10 mon.a@0(leader).auth v0 create_pending v 1
2016-07-18 19:48:50.232551 7f0c083ea4c0 10 mon.a@0(leader).auth v0 create_initial -- creating initial map
2016-07-18 19:48:50.232759 7f0c083ea4c0 10 mon.a@0(leader).auth v0 check_rotate updated rotating
2016-07-18 19:48:50.232771 7f0c083ea4c0 10 mon.a@0(leader).paxosservice(auth 0..0) propose_pending
2016-07-18 19:48:50.232782 7f0c083ea4c0 10 mon.a@0(leader).auth v0 encode_pending v 1
2016-07-18 19:48:50.232800 7f0c083ea4c0  5 mon.a@0(leader).paxos(paxos writing c 0..0) queue_pending_finisher 0xb18c250
2016-07-18 19:48:50.232804 7f0c083ea4c0 10 mon.a@0(leader).paxos(paxos writing c 0..0) trigger_propose not active, will propose later
2016-07-18 19:48:50.232805 7f0c083ea4c0 10 mon.a@0(leader).data_health(2) start_epoch epoch 2
2016-07-18 19:48:50.232811 7f0c083ea4c0  1 mon.a@0(leader) e0 apply_quorum_to_compatset_features enabling new quorum features: compat={},rocompat={},incompat={4=support erasure code pools,5=new-style osdmap encoding,6=support isa/lrc erasure code,7=support shec erasure code}
2016-07-18 19:48:50.233244 7f0c083ea4c0 10 mon.a@0(leader) e0 apply_compatset_features_to_quorum_requirements required_features 9025616074506240
2016-07-18 19:48:50.233253 7f0c083ea4c0 10 mon.a@0(leader) e0 timecheck_finish
2016-07-18 19:48:50.233256 7f0c083ea4c0 10 mon.a@0(leader) e0 resend_routed_requests
2016-07-18 19:48:50.233257 7f0c083ea4c0 10 mon.a@0(leader) e0 register_cluster_logger
2016-07-18 19:48:50.251223 7f0bff949700 10 accepter.accepter starting
2016-07-18 19:48:50.251235 7f0bff949700 20 accepter.accepter calling poll
2016-07-18 19:48:50.251233 7f0c0194d700 10 -- 127.0.0.1:7116/0 reaper_entry start
2016-07-18 19:48:50.251246 7f0c0194d700 10 -- 127.0.0.1:7116/0 reaper
2016-07-18 19:48:50.251248 7f0c0194d700 10 -- 127.0.0.1:7116/0 reaper done
2016-07-18 19:48:50.251258 7f0c0014a700 20 -- 127.0.0.1:7116/0 queue 0xb28cb40 prio 196
2016-07-18 19:48:50.252589 7f0c0094b700  1 -- 127.0.0.1:7116/0 <== mon.0 127.0.0.1:7116/0 0 ==== log(1 entries from seq 1 at 2016-07-18 19:48:50.231541) v1 ==== 0+0+0 (0 0 0) 0xb28cb40 con 0xb1c8800
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1321: display_logs:  read file
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1458: main:  code=1
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1460: main:  teardown testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:118: teardown:  local dir=testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:119: teardown:  kill_daemons testdir/rados-striper KILL
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:252: kill_daemons:  shopt -q -o xtrace
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:252: kill_daemons:  echo true
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:252: kill_daemons:  local trace=true
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:253: kill_daemons:  true
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:253: kill_daemons:  shopt -u -o xtrace
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:269: kill_daemons:  return 0
//home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:120: teardown:  stat -f -c %T .
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:120: teardown:  '[' xfs == btrfs ']'
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:123: teardown:  rm -fr testdir/rados-striper
/home/jenkins-build/build/workspace/ceph-pull-requests/qa/workunits/ceph-helpers.sh:1461: main:  return 1