; ceph.conf.txt — attachment from tracker (Project > General > Profile)
; uploaded by Laura Paduano, 02/15/2019 12:21 PM, 6.5 KB
; generated by vstart.sh on Fri Feb 15 10:56:49 UTC 2019
[client.vstart.sh]
        num mon = 3
        num osd = 3
        num mds = 3
        num mgr = 2
        num rgw = 1

[global]
        fsid = 24dd9774-9454-4dd7-a74d-3de8208aa5c6
        osd failsafe full ratio = .99
        mon osd full ratio = .99
        mon osd nearfull ratio = .99
        mon osd backfillfull ratio = .99
        erasure code dir = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/lib
        plugin dir = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/lib
        filestore fd cache size = 32
        run dir = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out
        crash dir = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out
        enable experimental unrecoverable data corrupting features = *
        osd_crush_chooseleaf_type = 0
        debug asok assert abort = true
        ms bind msgr2 = true
        ms bind msgr1 = true

        lockdep = true
        auth cluster required = cephx
        auth service required = cephx
        auth client required = cephx
[client]
        keyring = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/keyring
        log file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.$pid.log
        admin socket = /tmp/ceph-asok.nYpdCA/$name.$pid.asok

[client.rgw]
        rgw frontends = civetweb port=8000
        admin socket = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/radosgw.8000.asok
        ; needed for s3tests
        rgw crypt s3 kms encryption keys = testkey-1=YmluCmJvb3N0CmJvb3N0LWJ1aWxkCmNlcGguY29uZgo= testkey-2=aWIKTWFrZWZpbGUKbWFuCm91dApzcmMKVGVzdGluZwo=
        rgw crypt require ssl = false
        ; uncomment the following to set LC days as the value in seconds;
        ; needed for passing lc time based s3-tests (can be verbose)
        ; rgw lc debug interval = 10
[mds]
        log file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.log
        admin socket = /tmp/ceph-asok.nYpdCA/$name.asok
        chdir = ""
        pid file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.pid
        heartbeat file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.heartbeat

        mds data = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/dev/mds.$id
        mds root ino uid = 1001
        mds root ino gid = 1001

[mgr]
        mgr data = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/dev/mgr.$id
        mgr module path = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/src/pybind/mgr

        log file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.log
        admin socket = /tmp/ceph-asok.nYpdCA/$name.asok
        chdir = ""
        pid file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.pid
        heartbeat file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.heartbeat

[osd]
        log file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.log
        admin socket = /tmp/ceph-asok.nYpdCA/$name.asok
        chdir = ""
        pid file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.pid
        heartbeat file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.heartbeat

        osd_check_max_object_name_len_on_startup = false
        osd data = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/dev/osd$id
        osd journal = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/dev/osd$id/journal
        osd journal size = 100
        osd class tmp = out
        osd class dir = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/lib
        osd class load list = *
        osd class default list = *

        filestore wbthrottle xfs ios start flusher = 10
        filestore wbthrottle xfs ios hard limit = 20
        filestore wbthrottle xfs inodes hard limit = 30
        filestore wbthrottle btrfs ios start flusher = 10
        filestore wbthrottle btrfs ios hard limit = 20
        filestore wbthrottle btrfs inodes hard limit = 30
        bluestore fsck on mount = true
        bluestore block create = true
        bluestore block db path = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/dev/osd$id/block.db.file
        bluestore block db size = 67108864
        bluestore block db create = true
        bluestore block wal path = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/dev/osd$id/block.wal.file
        bluestore block wal size = 1048576000
        bluestore block wal create = true

        ; kstore
        kstore fsck on mount = true
        osd objectstore = bluestore

[mon]
        mgr initial modules = dashboard restful iostat

        log file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.log
        admin socket = /tmp/ceph-asok.nYpdCA/$name.asok
        chdir = ""
        pid file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.pid
        heartbeat file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/$name.heartbeat

        debug mon = 20
        debug paxos = 20
        debug auth = 20
        debug mgrc = 20
        debug ms = 1

        mon cluster log file = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/out/cluster.mon.$id.log
        osd pool default erasure code profile = plugin=jerasure technique=reed_sol_van k=2 m=1 crush-failure-domain=osd
[mon.a]
        host = ceph-builders
        mon data = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/dev/mon.a
[mon.b]
        host = ceph-builders
        mon data = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/dev/mon.b
[mon.c]
        host = ceph-builders
        mon data = /home/jenkins-build/build/workspace/ceph-dashboard-pull-requests/build/dev/mon.c
; NOTE(review): second [global] header — appended by vstart.sh after the mon
; addresses are known; Ceph merges duplicate sections, so this is effective.
[global]
        mon host = [v2:127.0.0.1:40619,v1:127.0.0.1:40620] [v2:127.0.0.1:40621,v1:127.0.0.1:40622] [v2:127.0.0.1:40623,v1:127.0.0.1:40624]
[mgr.x]
        host = ceph-builders
[mgr.y]
        host = ceph-builders
[osd.0]
        host = ceph-builders
[osd.1]
        host = ceph-builders
[osd.2]
        host = ceph-builders
[mds.a]
        host = ceph-builders
[mds.b]
        host = ceph-builders
[mds.c]
        host = ceph-builders