Crontab
MAILTO="ceph-infra@redhat.com;yweinste@redhat.com" 
#MAILTO="tmuthami@redhat.com;yweinste@redhat.com" 
CEPH_QA_EMAIL="ceph-qa@ceph.com" 

# Edit this file to introduce tasks to be run by cron.
# 
# Each task to run has to be defined through a single line
# indicating with different fields when the task will be run
# and what command to run for the task
# 
# To define the time you can provide concrete values for
# minute (m), hour (h), day of month (dom), month (mon),
# and day of week (dow) or use '*' in these fields (for 'any').
# 
# Notice that tasks will be started based on the cron's system
# daemon's notion of time and timezones.
# 
# Output of the crontab jobs (including errors) is sent through
# email to the user the crontab file belongs to (unless redirected).
# 
# For example, you can run a backup of all your user accounts
# at 5 a.m. every week with:
# 0 5 * * 1 tar -zcf /var/backups/home.tgz /home/
# 
# For more information see the manual pages of crontab(5) and cron(8)
# 
# m h  dom mon dow   command
PATH=/home/teuthology/src/teuthology_master/virtualenv/bin:/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin

#* * * * * /home/teuthology/bin/cron_wrapper date
#* * * * * /home/teuthology/bin/cron_wrapper date_invalid

#queues status
#25 07 * * *   ~/yuriw2/queues_status_one $CEPH_QA_EMAIL

#Publish this crontab to the Tracker page http://tracker.ceph.com/projects/ceph-releases/wiki/Crontab
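# Roughly, the job below: dumps the live crontab, HTML-escapes <, > and & so the text
# is safe inside the XML payload, prepends an 'h3. Crontab' wiki header, wraps the dump
# in a <pre> block, and PUTs the result to the Redmine wiki API for the sepia page,
# authenticating with the key stored in /etc/redmine-key.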
00 6 * * *  crontab=$(crontab -l | perl -p -e 's/</&lt;/g; s/>/&gt;/g; s/&/&amp;/g') ; header=$(echo h3. Crontab ; echo) ; curl --verbose -X PUT --header 'Content-type: application/xml' --data-binary '<?xml version="1.0"?><wiki_page><text>'"$header"'<pre>'"$crontab"'</pre></text></wiki_page>' http://tracker.ceph.com/projects/ceph-releases/wiki/sepia.xml?key=$(cat /etc/redmine-key)

#Publish this suite descriptions to the Tracker page http://tracker.ceph.com/projects/ceph-qa-suite/infernalis-x
10 8 * * *  SUITE_NAME=~/src/ceph-qa-suite_master/suites/upgrade/infernalis-x; crontab=$(teuthology-describe-tests --show-facet no $SUITE_NAME | perl -p -e 's/</&lt;/g; s/>/&gt;/g; s/&/&amp;/g') ; header=$(echo h4. $SUITE_NAME ; echo " "; echo " ") ; curl --verbose -X PUT --header 'Content-type: application/xml' --data-binary '<?xml version="1.0"?><wiki_page><text>'"$header"'<pre>'"$crontab"'</pre></text></wiki_page>' http://tracker.ceph.com/projects/ceph-qa-suite/wiki/infernalis-x.xml?key=$(cat /etc/redmine-key)

20 8 * * *  SUITE_NAME=~/src/ceph-qa-suite_master/suites/upgrade/infernalis; crontab=$(teuthology-describe-tests --show-facet no $SUITE_NAME | perl -p -e 's/</&lt;/g; s/>/&gt;/g; s/&/&amp;/g') ; header=$(echo h4. $SUITE_NAME ; echo " "; echo " ") ; curl --verbose -X PUT --header 'Content-type: application/xml' --data-binary '<?xml version="1.0"?><wiki_page><text>'"$header"'<pre>'"$crontab"'</pre></text></wiki_page>' http://tracker.ceph.com/projects/ceph-qa-suite/wiki/infernalis.xml?key=$(cat /etc/redmine-key)

30 8 * * *  SUITE_NAME=~/src/ceph-qa-suite_master/suites/ceph-ansible; crontab=$(teuthology-describe-tests --show-facet no $SUITE_NAME | perl -p -e 's/</&lt;/g; s/>/&gt;/g; s/&/&amp;/g') ; header=$(echo h4. $SUITE_NAME ; echo " "; echo " ") ; curl --verbose -X PUT --header 'Content-type: application/xml' --data-binary '<?xml version="1.0"?><wiki_page><text>'"$header"'<pre>'"$crontab"'</pre></text></wiki_page>' http://tracker.ceph.com/projects/ceph-qa-suite/wiki/ceph-ansible.xml?key=$(cat /etc/redmine-key)

# Ensure teuthology is up-to-date
45 16 * * *   cd /home/teuthology/src/teuthology_master && /home/teuthology/bin/cron_wrapper git pull
55 16 * * *   cd /home/teuthology/src/ceph-qa-suite_master && /home/teuthology/bin/cron_wrapper git pull
# Ensure ceph-sepia-secrets is up-to-date
35 16 * * *   cd /home/teuthology/ceph-sepia-secrets && /home/teuthology/bin/cron_wrapper git pull

6 23  * * 1,5 MACHINE_NAME=vps; /home/teuthology/bin/cron_wrapper teuthology-suite -v -c master -m $MACHINE_NAME -s ceph-ansible -e $CEPH_QA_EMAIL ~/vps.yaml
9 23  * * 1,5 MACHINE_NAME=vps; /home/teuthology/bin/cron_wrapper teuthology-suite -v -c master -m $MACHINE_NAME -s teuthology -e $CEPH_QA_EMAIL ~/vps.yaml

#Powercycle tests - let's try it manually and get a green run before we add it to the nightlies
#02 01 * * 1,3,5,7 CEPH_BRANCH=jewel; MACHINE_NAME=mira; teuthology-suite -v -c $CEPH_BRANCH -m $MACHINE_NAME -k testing -s powercycle -e $CEPH_QA_EMAIL

#********** master branch
#moved to ovh 00 5 * * *   teuthology-suite -v -c master -m plana,burnupi,mira -s smoke -k distro -p 70
#moved to ovh 10 5 */3 * * teuthology-suite -v -c master -m plana,burnupi,mira -k distro -s big

#moved to ovh 00 23 * * 1,5 teuthology-suite -v -c master -m plana,burnupi,mira -s rbd
05 17 * * 1,6 CEPH_BRANCH=master; MACHINE_NAME=smithi; /home/teuthology/bin/cron_wrapper teuthology-suite -v -c $CEPH_BRANCH -m $MACHINE_NAME -s rgw -e $CEPH_QA_EMAIL
15 17 * * 1,6 CEPH_BRANCH=master; MACHINE_NAME=smithi; /home/teuthology/bin/cron_wrapper teuthology-suite -v -c $CEPH_BRANCH -m $MACHINE_NAME -s fs -e $CEPH_QA_EMAIL
25 17 * * 1,6 CEPH_BRANCH=master; MACHINE_NAME=mira; /home/teuthology/bin/cron_wrapper teuthology-suite -v -c $CEPH_BRANCH -m $MACHINE_NAME -k testing -s kcephfs -e $CEPH_QA_EMAIL
35 17 * * 1,6 CEPH_BRANCH=master; MACHINE_NAME=mira; /home/teuthology/bin/cron_wrapper teuthology-suite -v -c $CEPH_BRANCH -m $MACHINE_NAME -k testing -s knfs -e $CEPH_QA_EMAIL
45 17 * * 1,6 CEPH_BRANCH=master; MACHINE_NAME=mira; /home/teuthology/bin/cron_wrapper teuthology-suite -v -c $CEPH_BRANCH -m $MACHINE_NAME -s hadoop -e $CEPH_QA_EMAIL
55 17 * * 1,6 CEPH_BRANCH=master; MACHINE_NAME=smithi; /home/teuthology/bin/cron_wrapper teuthology-suite -v -c $CEPH_BRANCH -m $MACHINE_NAME -s samba -e $CEPH_QA_EMAIL
#moved to ovh 16 23 * * 2   teuthology-suite -v -c master -m plana,burnupi,mira -s rest
#moved to ovh 06 23 * * 2,6 teuthology-suite -v -c master -m plana,burnupi,mira -k testing -s krbd
#moved to ovh 18 23 * * 6   teuthology-suite -v -c master -m plana,burnupi,mira -k testing -s multimds
#*********

########################## infernalis ##################
### rados suite divided into 14 parts, 1 part run each day over 2 weeks; the bc expression runs parts 0-6 on even weeks and 7-13 on odd weeks
### % is escaped because it is a special character to cron
FILTER_OUT="--filter-out=random.yaml\,async.yaml" 

#moved to ovh 00 21 * * 1 /home/teuthology/bin/schedule_rados.sh 0 infernalis "plana,burnupi,mira" $CEPH_QA_EMAIL $FILTER_OUT
#moved to ovh 00 21 * * 2 /home/teuthology/bin/schedule_rados.sh 1 infernalis "plana,burnupi,mira" $CEPH_QA_EMAIL $FILTER_OUT
#moved to ovh 00 21 * * 3 /home/teuthology/bin/schedule_rados.sh 2 infernalis "plana,burnupi,mira" $CEPH_QA_EMAIL $FILTER_OUT
#moved to ovh 00 21 * * 4 /home/teuthology/bin/schedule_rados.sh 3 infernalis "plana,burnupi,mira" $CEPH_QA_EMAIL $FILTER_OUT
#moved to ovh 00 21 * * 5 /home/teuthology/bin/schedule_rados.sh 4 infernalis "plana,burnupi,mira" $CEPH_QA_EMAIL $FILTER_OUT
#moved to ovh 00 21 * * 6 /home/teuthology/bin/schedule_rados.sh 5 infernalis "plana,burnupi,mira" $CEPH_QA_EMAIL $FILTER_OUT
#moved to ovh 00 21 * * 7 /home/teuthology/bin/schedule_rados.sh 6 infernalis "plana,burnupi,mira" $CEPH_QA_EMAIL $FILTER_OUT
##########################

#moved to ovh 00 23 * * 2 teuthology-suite -v -c infernalis -m plana,burnupi,mira -k distro -s rbd
#moved to ovh 02 23 * * 1 teuthology-suite -v -c infernalis -m plana,burnupi,mira -k distro -s rgw
#moved to ovh 04 23 * * 2 teuthology-suite -v -c infernalis -m plana,burnupi,mira  -s fs
#moved to ovh 06 23 * * 1 teuthology-suite -v -c infernalis -m plana,burnupi,mira -k testing -s krbd
#moved to ovh 08 23 * * 1 teuthology-suite -v -c infernalis -m plana,burnupi,mira -k testing -s kcephfs
#moved to ovh 10 23 * * 1 teuthology-suite -v -c infernalis -m plana,burnupi,mira -k testing -s knfs
#moved to ovh 12 23 * * 7 teuthology-suite -v -c infernalis -m plana,burnupi,mira  -s hadoop
#moved to ovh 14 23 * * 7 teuthology-suite -v -c infernalis -m plana,burnupi,mira  -s samba
#moved to ovh 16 23 * * 7 teuthology-suite -v -c infernalis -m plana,burnupi,mira  -s rest
#moved to ovh 18 23 * * 7 teuthology-suite -v -c infernalis -m plana,burnupi,mira  -k testing -s multimds
#moved to ovh 20 23 * * 2 teuthology-suite -v -c infernalis -m plana,burnupi,mira -k distro -s upgrade/client-upgrade

########################## jewel ##################
### rados suite divided into 14 parts, 1 part run each day over 2 weeks; the bc expression runs parts 0-6 on even weeks and 7-13 on odd weeks
### % is escaped because it is a special character to cron
### format for filters: /home/teuthology/bin/schedule_rados.sh 0 infernalis $MACHINE_NAME $CEPH_QA_EMAIL $FILTER_OUT
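### illustrative sketch only (the real arithmetic lives inside schedule_rados.sh): given a
### weekday part DAY in 0-6, the full 0-13 part index can be derived from the week number:
###   part=$(echo "DAY + 7 * ($(date +%U) % 2)" | bc)    # even week -> DAY, odd week -> DAY+7
### (remember to escape % as \% if an expression like this is placed directly in a crontab command)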

00 22 * * 1 CEPH_BRANCH=jewel; MACHINE_NAME=smithi; /home/teuthology/bin/cron_wrapper /home/teuthology/bin/schedule_rados.sh 0 $CEPH_BRANCH $MACHINE_NAME $CEPH_QA_EMAIL
00 22 * * 2 CEPH_BRANCH=jewel; MACHINE_NAME=smithi; /home/teuthology/bin/cron_wrapper /home/teuthology/bin/schedule_rados.sh 1 $CEPH_BRANCH $MACHINE_NAME $CEPH_QA_EMAIL
00 22 * * 3 CEPH_BRANCH=jewel; MACHINE_NAME=smithi; /home/teuthology/bin/cron_wrapper /home/teuthology/bin/schedule_rados.sh 2 $CEPH_BRANCH $MACHINE_NAME $CEPH_QA_EMAIL
00 22 * * 4 CEPH_BRANCH=jewel; MACHINE_NAME=smithi; /home/teuthology/bin/cron_wrapper /home/teuthology/bin/schedule_rados.sh 3 $CEPH_BRANCH $MACHINE_NAME $CEPH_QA_EMAIL
00 22 * * 5 CEPH_BRANCH=jewel; MACHINE_NAME=smithi; /home/teuthology/bin/cron_wrapper /home/teuthology/bin/schedule_rados.sh 4 $CEPH_BRANCH $MACHINE_NAME $CEPH_QA_EMAIL
00 22 * * 6 CEPH_BRANCH=jewel; MACHINE_NAME=smithi; /home/teuthology/bin/cron_wrapper /home/teuthology/bin/schedule_rados.sh 5 $CEPH_BRANCH $MACHINE_NAME $CEPH_QA_EMAIL
00 22 * * 7 CEPH_BRANCH=jewel; MACHINE_NAME=smithi; /home/teuthology/bin/cron_wrapper /home/teuthology/bin/schedule_rados.sh 6 $CEPH_BRANCH $MACHINE_NAME $CEPH_QA_EMAIL

##########################

########################## hammer ##################
### rados suite divided into 14 parts, 1 part run each day over 2 weeks; the bc expression runs parts 0-6 on even weeks and 7-13 on odd weeks
### temporarily running hammer rados on smithi per Sam's request, as we observe many ENOSPC failures in the ovh lab

00 03 * * 1 CEPH_BRANCH=hammer; MACHINE_NAME=vps; /home/teuthology/bin/cron_wrapper /home/teuthology/bin/schedule_rados.sh 0 $CEPH_BRANCH $MACHINE_NAME $CEPH_QA_EMAIL --distro=ubuntu
00 03 * * 2 CEPH_BRANCH=hammer; MACHINE_NAME=vps; /home/teuthology/bin/cron_wrapper /home/teuthology/bin/schedule_rados.sh 1 $CEPH_BRANCH $MACHINE_NAME $CEPH_QA_EMAIL --distro=ubuntu
00 03 * * 3 CEPH_BRANCH=hammer; MACHINE_NAME=vps; /home/teuthology/bin/cron_wrapper /home/teuthology/bin/schedule_rados.sh 2 $CEPH_BRANCH $MACHINE_NAME $CEPH_QA_EMAIL --distro=ubuntu
00 03 * * 4 CEPH_BRANCH=hammer; MACHINE_NAME=vps; /home/teuthology/bin/cron_wrapper /home/teuthology/bin/schedule_rados.sh 3 $CEPH_BRANCH $MACHINE_NAME $CEPH_QA_EMAIL --distro=ubuntu
00 03 * * 5 CEPH_BRANCH=hammer; MACHINE_NAME=vps; /home/teuthology/bin/cron_wrapper /home/teuthology/bin/schedule_rados.sh 4 $CEPH_BRANCH $MACHINE_NAME $CEPH_QA_EMAIL --distro=ubuntu
00 03 * * 6 CEPH_BRANCH=hammer; MACHINE_NAME=vps; /home/teuthology/bin/cron_wrapper /home/teuthology/bin/schedule_rados.sh 5 $CEPH_BRANCH $MACHINE_NAME $CEPH_QA_EMAIL --distro=ubuntu
00 03 * * 7 CEPH_BRANCH=hammer; MACHINE_NAME=vps; /home/teuthology/bin/cron_wrapper /home/teuthology/bin/schedule_rados.sh 6 $CEPH_BRANCH $MACHINE_NAME $CEPH_QA_EMAIL --distro=ubuntu

##########################

########################## jewel ##################

#--filter-out rbd_cli_tests.yaml while waiting for a fix for issue 14825
00 02 * * 3,7  CEPH_BRANCH=jewel; MACHINE_NAME=smithi; /home/teuthology/bin/cron_wrapper teuthology-suite -v -c $CEPH_BRANCH -m $MACHINE_NAME -k distro -s rbd -e $CEPH_QA_EMAIL  --filter-out rbd_cli_tests.yaml 
05 02 * * 3,7  CEPH_BRANCH=jewel; MACHINE_NAME=smithi; /home/teuthology/bin/cron_wrapper teuthology-suite -v -c $CEPH_BRANCH -m $MACHINE_NAME -k distro -s rgw -e $CEPH_QA_EMAIL
10 02 * * 3,7  CEPH_BRANCH=jewel; MACHINE_NAME=smithi; /home/teuthology/bin/cron_wrapper teuthology-suite -v -c $CEPH_BRANCH -m $MACHINE_NAME -s fs -e $CEPH_QA_EMAIL
15 02 * * 3,7  CEPH_BRANCH=jewel; MACHINE_NAME=smithi; /home/teuthology/bin/cron_wrapper teuthology-suite -v -c $CEPH_BRANCH -m $MACHINE_NAME -k testing -s krbd -e $CEPH_QA_EMAIL
20 02 * * 3,7  CEPH_BRANCH=jewel; MACHINE_NAME=smithi; /home/teuthology/bin/cron_wrapper teuthology-suite -v -c $CEPH_BRANCH -m $MACHINE_NAME -k testing -s kcephfs -e $CEPH_QA_EMAIL
25 02 * * 3,7  CEPH_BRANCH=jewel; MACHINE_NAME=smithi; /home/teuthology/bin/cron_wrapper teuthology-suite -v -c $CEPH_BRANCH -m $MACHINE_NAME -k testing -s knfs -e $CEPH_QA_EMAIL
30 02 * * 3,7  CEPH_BRANCH=jewel; MACHINE_NAME=mira;   /home/teuthology/bin/cron_wrapper teuthology-suite -v -c $CEPH_BRANCH -m $MACHINE_NAME -s hadoop -e $CEPH_QA_EMAIL
35 02 * * 3,7  CEPH_BRANCH=jewel; MACHINE_NAME=mira;   /home/teuthology/bin/cron_wrapper teuthology-suite -v -c $CEPH_BRANCH -m $MACHINE_NAME -s samba -e $CEPH_QA_EMAIL
40 02 * * 3,7  CEPH_BRANCH=jewel; MACHINE_NAME=mira;   /home/teuthology/bin/cron_wrapper teuthology-suite -v -c $CEPH_BRANCH -m $MACHINE_NAME -s rest -e $CEPH_QA_EMAIL
#removed per Greg's request 18 23 * * 4  CEPH_BRANCH=jewel; MACHINE_NAME=smithi; teuthology-suite -v -c $CEPH_BRANCH -m $MACHINE_NAME -k testing -s multimds -e $CEPH_QA_EMAIL
45 02 * * 3,7  CEPH_BRANCH=jewel; MACHINE_NAME=smithi; /home/teuthology/bin/cron_wrapper teuthology-suite -v -c $CEPH_BRANCH -m $MACHINE_NAME -k distro -s upgrade/client-upgrade -e $CEPH_QA_EMAIL
50 02 * * 3,7  CEPH_BRANCH=jewel; MACHINE_NAME=mira; /home/teuthology/bin/cron_wrapper teuthology-suite -v -c $CEPH_BRANCH -m $MACHINE_NAME -k distro -s ceph-deploy -e $CEPH_QA_EMAIL
55 02 * * 2,4  CEPH_BRANCH=jewel; MACHINE_NAME=vps; /home/teuthology/bin/cron_wrapper teuthology-suite -v -c $CEPH_BRANCH -m $MACHINE_NAME -k distro -s ceph-deploy -e $CEPH_QA_EMAIL
10 03 * * 3,7  CEPH_BRANCH=jewel; MACHINE_NAME=mira; /home/teuthology/bin/cron_wrapper teuthology-suite -v -c $CEPH_BRANCH -m $MACHINE_NAME -k distro -s ceph-disk -e $CEPH_QA_EMAIL

## removed for now YuriW - 10 1 * * 1,3,5 teuthology-suite -v -c firefly -m vps -k distro -s ceph-deploy ~/vps.yaml
## baremetal part of ceph-deploy/hammer/infernalis scheduled in the typica lab
#moved to ovh 10 1 * * 7   teuthology-suite -v -c hammer -m vps -k distro -s ceph-deploy ~/vps.yaml --filter ubuntu_14
#moved to ovh 10 2 * * 7   teuthology-suite -v -c infernalis -m vps -k distro -s ceph-deploy ~/vps.yaml
#moved to ovh 10 2 * * 1,3 teuthology-suite -v -c jewel -m vps -k distro -s ceph-deploy ~/vps.yaml

## Upgrade tests for infernalis - running on bare metal 
#moved to ovh 02 17 * * 4 teuthology-suite -v -c infernalis -k distro -m plana,burnupi,mira -s upgrade/hammer-x --filter ubuntu_
#moved to ovh 02 17 * * 1 teuthology-suite -v -c jewel -k distro -m plana,burnupi,mira -s upgrade/hammer-x --filter ubuntu_

#********** all below on VPS *****************
#moved to ovh 00 17 * * 2,5 teuthology-suite -v -c firefly -k distro -m vps -s upgrade/firefly --timeout 56000  ~/vps.yaml

## upgrade dumpling to firefly tests  
#moved to ovh 13 19 * * 7 teuthology-suite -v -c firefly -k distro -m vps -s upgrade/dumpling-x ~/vps.yaml

# Upgrade tests for giant - running on vps to cover multiple distros
#giant EOL 15 18 * * 3,6 teuthology-suite -v -c giant -k distro -m vps -s upgrade/dumpling-firefly-x ~/vps.yaml
#giant EOL 18 18 * * 3,6 teuthology-suite -v -c giant -k distro -m vps -s upgrade/firefly-x ~/vps.yaml

#new suite for infernalis; has to run on vps once it has passed on baremetal
#moved to ovh 18 13 * * 2,4 teuthology-suite -v -c infernalis -k distro -m plana,burnupi,mira -s upgrade/firefly-hammer-x --filter ubuntu_14
#moved to ovh 18 13 * * 3,5,7 teuthology-suite -v -c infernalis -k distro -m plana,burnupi,mira -s upgrade/infernalis --filter ubuntu_14

# Upgrade tests for hammer - running on vps to cover multiple distros
#removed the line below b/c the suite 'upgrade/dumpling-firefly-x' on vps runs out of memory on hammer, YuriW 3/29/15
#15 17 * * 1,3 teuthology-suite -v -c hammer -k distro -m vps -s upgrade/dumpling-firefly-x --suite-branch hammer ~/vps.yaml 
#moved to ovh 18 17 * * 1,5 teuthology-suite -v -c hammer -k distro -m vps -s upgrade/firefly-x ~/vps.yaml
#giant EOL 05 17 * * 1,5 teuthology-suite -v -c hammer -k distro -m vps -s upgrade/giant-x ~/vps.yaml
#moved to ovh 05 16 * * 1,5 teuthology-suite -v -c hammer -k distro -m vps -s upgrade/hammer ~/vps.yaml

# Upgrade tests for infernalis - running on vps to cover multiple distros
#removed the line below b/c the suite 'upgrade/dumpling-firefly-x' on vps runs out of memory on infernalis, YuriW 3/29/15
#dumpling-firefly-x/infernalis is an invalid upgrade path
#15 17 * * 0,2,4,6 teuthology-suite -v -c infernalis -k distro -m vps -s upgrade/dumpling-firefly-x --suite-branch hammer ~/vps.yaml
05 18 * * 2,4,6   /home/teuthology/bin/cron_wrapper teuthology-suite -v -c infernalis -k distro -m vps -s upgrade/hammer-x ~/vps.yaml
10 18 * * 1,7     /home/teuthology/bin/cron_wrapper teuthology-suite -v -c infernalis -k distro -m vps -s upgrade/infernalis ~/vps.yaml
15 18 * * *       /home/teuthology/bin/cron_wrapper teuthology-suite -v -c jewel -k distro -m vps -s upgrade/hammer-x ~/vps.yaml
20 17 * * *       /home/teuthology/bin/cron_wrapper teuthology-suite -v -c jewel -k distro -m vps -s upgrade/infernalis-x ~/vps.yaml

20 04 * * *       /home/teuthology/bin/cron_wrapper teuthology-suite -v -c jewel -k distro -m vps -s upgrade/jewel-x ~/vps.yaml

#Ansible: suite: rados  branch: master
#15 17 * * 2,7 /home/teuthology/bin/cron_wrapper teuthology-suite -v -c master -m smithi -s rados  --subset $(echo "(($(date +%U) % 4) * 7) + CHANGE TO 0...6" | bc)/28 -e ceph-qa@ceph.com 
#Ansible: suite: fs  branch: master
#10 17 * * 1,6 /home/teuthology/bin/cron_wrapper teuthology-suite -v -c master -m smithi -s fs   -e ceph-qa@ceph.com 
#Ansible: suite: rgw  branch: master
#5 17 * * 1,6 /home/teuthology/bin/cron_wrapper teuthology-suite -v -c master -m smithi -s rgw -k distro  -e ceph-qa@ceph.com -p 100