; --- Bug-tracker page chrome captured with this attachment (not part of ceph.conf) ---
; Project / General / Profile
; Bug #38328 » ceph.conf.txt
; Laura Paduano, 02/15/2019 12:21 PM
; generated by vstart.sh on Fri Feb 15 10:56:49 UTC 2019
; Bookkeeping section recording the daemon counts of this dev cluster.
; Presumably read back by vstart.sh/stop.sh rather than by the ceph
; daemons themselves — verify against vstart.sh.
[client.vstart.sh]
num mon = 3
num osd = 3
num mds = 3
num mgr = 2
num rgw = 1

; Cluster-wide defaults for all daemons and clients.
[global]
fsid = 24dd9774-9454-4dd7-a74d-3de8208aa5c6
; full-ratio thresholds raised to .99 so this tiny dev cluster never
; blocks I/O on near-full/full warnings
osd failsafe full ratio = .99
mon osd full ratio = .99
mon osd nearfull ratio = .99
mon osd backfillfull ratio = .99
; load erasure-code and other plugins from the local build tree,
; not from a system install path
erasure code dir = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/lib
plugin dir = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/lib
filestore fd cache size = 32
run dir = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out
crash dir = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out
; dev/CI cluster only — never enable experimental features on real data
enable experimental unrecoverable data corrupting features = *
; CRUSH failure domain type 0 (osd) lets placement groups map to OSDs
; that share a single host — required for a one-machine cluster.
; Normalized from osd_crush_chooseleaf_type: ceph treats spaces,
; underscores and dashes in option names interchangeably.
osd crush chooseleaf type = 0
debug asok assert abort = true
; enable both the msgr2 and legacy msgr1 messenger protocols
ms bind msgr2 = true
ms bind msgr1 = true
; lock-order (deadlock) checking
lockdep = true
; require cephx authentication for cluster, service and client traffic
auth cluster required = cephx
auth service required = cephx
auth client required = cephx
; Defaults for all client processes; $name and $pid are expanded by
; ceph at runtime.
[client]
keyring = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/keyring
log file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.$pid.log
admin socket = /tmp/ceph-asok.nYpdCA/$name.$pid.asok

; Single radosgw (RGW) instance, civetweb frontend on port 8000.
[client.rgw]
rgw frontends = civetweb port=8000
admin socket = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/radosgw.8000.asok
; needed for s3tests
; NOTE(review): fixed test-only KMS keys (base64-encoded junk text) —
; not production secrets, but plaintext keys in config must stay dev-only
rgw crypt s3 kms encryption keys = testkey-1=YmluCmJvb3N0CmJvb3N0LWJ1aWxkCmNlcGguY29uZgo= testkey-2=aWIKTWFrZWZpbGUKbWFuCm91dApzcmMKVGVzdGluZwo=
rgw crypt require ssl = false
; uncomment the following to set LC days as the value in seconds;
; needed for passing lc time based s3-tests (can be verbose)
; rgw lc debug interval = 10
; Metadata-server daemons; $name and $id are expanded by ceph at startup.
[mds]

log file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.log
admin socket = /tmp/ceph-asok.nYpdCA/$name.asok
; empty chdir keeps the daemon in the directory it was launched from
chdir = ""
pid file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.pid
heartbeat file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.heartbeat

mds data = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/dev/mds.$id
; uid/gid 1001 — presumably the jenkins build user; verify before reusing
; this config outside the CI environment
mds root ino uid = 1001
mds root ino gid = 1001

; Manager daemons; $name and $id are expanded by ceph at startup.
[mgr]
mgr data = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/dev/mgr.$id
; load mgr python modules from the source tree, not an install path
mgr module path = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/src/pybind/mgr

log file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.log
admin socket = /tmp/ceph-asok.nYpdCA/$name.asok
; empty chdir keeps the daemon in the directory it was launched from
chdir = ""
pid file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.pid
heartbeat file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.heartbeat


; Object-storage daemons; $name and $id are expanded by ceph at startup.
[osd]

log file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.log
admin socket = /tmp/ceph-asok.nYpdCA/$name.asok
; empty chdir keeps the daemon in the directory it was launched from
chdir = ""
pid file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.pid
heartbeat file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.heartbeat

; skip the max-object-name-length probe at startup (dev filesystems).
; Normalized from osd_check_max_object_name_len_on_startup: ceph treats
; spaces, underscores and dashes in option names interchangeably.
osd check max object name len on startup = false
osd data = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/dev/osd$id
osd journal = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/dev/osd$id/journal
; journal size is in megabytes — a tiny 100 MB journal, dev cluster only
osd journal size = 100
osd class tmp = out
osd class dir = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/lib
; allow every RADOS object class shipped in the build tree
osd class load list = *
osd class default list = *

; filestore writeback-throttle tuning (only effective when
; osd objectstore = filestore)
filestore wbthrottle xfs ios start flusher = 10
filestore wbthrottle xfs ios hard limit = 20
filestore wbthrottle xfs inodes hard limit = 30
filestore wbthrottle btrfs ios start flusher = 10
filestore wbthrottle btrfs ios hard limit = 20
filestore wbthrottle btrfs inodes hard limit = 30
; bluestore: fsck on every mount; create file-backed block, db (64 MiB)
; and wal (1000 MiB) devices inside each OSD's data directory
bluestore fsck on mount = true
bluestore block create = true
bluestore block db path = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/dev/osd$id/block.db.file
bluestore block db size = 67108864
bluestore block db create = true
bluestore block wal path = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/dev/osd$id/block.wal.file
bluestore block wal size = 1048576000
bluestore block wal create = true

; kstore settings (only effective when osd objectstore = kstore)
kstore fsck on mount = true
; active object-store backend for these OSDs
osd objectstore = bluestore


; Monitor daemons; $name and $id are expanded by ceph at startup.
[mon]
; mgr modules to enable on first cluster start (consumed by the mons)
mgr initial modules = dashboard restful iostat

log file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.log
admin socket = /tmp/ceph-asok.nYpdCA/$name.asok
; empty chdir keeps the daemon in the directory it was launched from
chdir = ""
pid file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.pid
heartbeat file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.heartbeat


; verbose monitor-side subsystem logging for CI debugging
debug mon = 20
debug paxos = 20
debug auth = 20
debug mgrc = 20
debug ms = 1

mon cluster log file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/cluster.mon.$id.log
; default erasure-code profile: jerasure reed_sol_van, k=2 data + m=1
; coding chunks, failure domain osd (suits a single-host dev cluster)
osd pool default erasure code profile = plugin=jerasure technique=reed_sol_van k=2 m=1 crush-failure-domain=osd
; three co-located monitors (a, b, c), one data directory each
[mon.a]
host = ceph-builders
mon data = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/dev/mon.a
[mon.b]
host = ceph-builders
mon data = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/dev/mon.b
[mon.c]
host = ceph-builders
mon data = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/dev/mon.c
; NOTE(review): second [global] header — ceph's config parser merges it
; with the [global] section above, but duplicate section headers are
; handled inconsistently by generic INI tools; consider folding
; "mon host" into the first [global] block.
; Each mon exposes a msgr2 (v2:) and a legacy msgr1 (v1:) endpoint.
[global]
mon host = [v2:127.0.0.1:40619,v1:127.0.0.1:40620] [v2:127.0.0.1:40621,v1:127.0.0.1:40622] [v2:127.0.0.1:40623,v1:127.0.0.1:40624]
; per-daemon placement: every daemon runs on the single CI build host
[mgr.x]
host = ceph-builders
[mgr.y]
host = ceph-builders
[osd.0]
host = ceph-builders
[osd.1]
host = ceph-builders
[osd.2]
host = ceph-builders
[mds.a]
host = ceph-builders
[mds.b]
host = ceph-builders
[mds.c]
host = ceph-builders
; (1-1/2) — pagination artifact from the bug-tracker page, not part of ceph.conf