
monotone Mtn Source Tree

Root/contrib/perf-test.sh

  • Property mtn:execute set to true
#!/bin/sh
set -e
if [ "$1" = "" ]; then
    MONOTONE=`pwd`/monotone
elif [ -x "$1" ]; then
    MONOTONE="$1"
else
    echo "Usage: $0 [monotone-binary-to-test [test-to-run ...]]"
    exit 1
fi

if [ ! -x $MONOTONE ]; then
    echo "$MONOTONE doesn't exist?!"
    exit 1
fi

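# Locate parse-accounting.pl, either under the current tree's contrib/
# directory or next to the binary under test.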
PARSE_ACCOUNT=`pwd`/contrib/parse-accounting.pl
if [ -x $PARSE_ACCOUNT ]; then
    :
elif [ -x `dirname $MONOTONE`/contrib/parse-accounting.pl ]; then
    PARSE_ACCOUNT=`dirname $MONOTONE`/contrib/parse-accounting.pl
else
    echo "can't find parse-accounting.pl. Looked in `pwd`/contrib, and `dirname $MONOTONE`/contrib"
    exit 1
fi

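# Pull the database path out of the current working copy's MT/options;
# the historical monotone revisions are checked out from that database.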
MONOTONE_DB=`cat MT/options | grep database | awk '{print $2}' | sed 's/^.//' | sed 's/.$//'`
if [ -z "$MONOTONE_DB" -o ! -f "$MONOTONE_DB" ]; then
    echo "Couldn't auto-determine monotone db?!"
    exit 1
fi
[ -d /tmp/mt-perf-test ] || mkdir /tmp/mt-perf-test
cd /tmp/mt-perf-test

# figure out if binary has timing built in...
ENABLE_MONOTONE_STATISTICS=1 $MONOTONE --help >timing-check.out 2>&1
if [ `grep '^STATS: ' timing-check.out | wc -l` -gt 1 ]; then
    MEASURE=
    PIDFILE_ARG=
    KILLBY=child
    echo "Using builtin statistics..."
else
    MEASURE=time
    PIDFILE_ARG=--pid-file=/tmp/mt-perf-test/pid-file
    KILLBY=file
    echo "Using external statistics..."
fi

[ -d staging ] || mkdir staging
cd staging

# Rebuild all of the various files for testing ...

if [ ! -f random.large ]; then
    echo "rebuilding random.large (this takes a long time)..."
    dd if=/dev/urandom of=random.large-new bs=1024k count=100 >/dev/null 2>&1
    mv random.large-new random.large
fi

for i in 0 1 2; do
    for j in 0 1 2 3 4 5 6 7 8 9; do
        if [ ! -f random.medium.$i$j ]; then
            echo "rebuilding random.medium.$i$j..."
            dd if=/dev/urandom of=random.medium-new bs=1024k count=10 >/dev/null 2>&1
            mv random.medium-new random.medium.$i$j
        fi
    done
done

if [ ! -f halfzero.large ]; then
    echo "rebuilding halfzero.large..."
    dd if=/dev/zero of=halfzero.large-new bs=1024k count=50 >/dev/null 2>&1
    dd if=/dev/urandom of=halfzero.large-new bs=1024k seek=50 count=50 >/dev/null 2>&1
    mv halfzero.large-new halfzero.large
fi

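# Check out a range of historical monotone revisions to use as realistic
# source trees for the tests.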
if [ ! -d monotone ]; then
    [ ! -d monotone-new ] || rm -rf monotone-new
    mkdir monotone-new
    # revisions 0.10 .. 0.22
    for i in 713ed1966baced883ed865a931f97259522f90da \
             fdc32bcc09e2714350fb514990bd26acb607264b \
             3cd6b8cc947ddab015fd945d3c305fc748bb6d0a \
             95a1a16c0941cc1ae51e9eb5d64d075ef35c5b19 \
             fdf1335b4dfd8c1529fef8db58e5b819b03f7c8a \
             20b36b747dcce1230a6e7a0b1554bd7874f0fbe7 \
             35da5df64546301d332303bbf63b6799d70932c8 \
             e8c9e4eb0534a4c6e538935576330de34ec42052 \
             168adf9537ff136c9b7fe7faad5991f92859390d \
             44ed8807bead656889fb5022f974e13a7169098c \
             e65bc11b6670a0b2ed8e72214cb81d94e6a9a2d1 \
             28058ae3e850229a5d8fae65415cbbf82b435377; do
        echo "checking out monotone rev $i..."
        $MONOTONE --db $MONOTONE_DB checkout --revision $i monotone-new/mt-$i
    done
    # Version 0.17: the branch has to be specified explicitly for checkout to work.
    i=337d62e5cbd50c36e2f2c2bda489a98de3a8aeb7
    echo "checking out monotone rev $i..."
    $MONOTONE --db $MONOTONE_DB checkout --branch net.venge.monotone --revision $i monotone-new/mt-$i
    mv monotone-new/mt-168adf9537ff136c9b7fe7faad5991f92859390d monotone-new/mt-0.19
    rm -rf monotone-new/*/MT
    mv monotone-new monotone
fi

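# Build a scratch dbdir holding the test key pair and the lua hooks shared
# by the checkin and netsync databases.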
cd /tmp/mt-perf-test
if [ ! -d dbdir ]; then
    [ ! -d dbdir-new ] || rm -rf dbdir-new
    mkdir dbdir-new
    cd dbdir-new
    cat >monotonerc <<EOF
function get_passphrase(keypair_id)
  return "c2fba42ffaac67b09575c6db9139d5f6"
end

function get_netsync_read_permitted (collection, identity)
  return true
end

function get_netsync_write_permitted (collection, identity)
  return true
end
EOF
    cat >keys <<EOF
[pubkey perf-test-id]
MIGdMA0GCSqGSIb3DQEBAQUAA4GLADCBhwKBgQCjEdB1Vr/Y8yYBKoeDXUzsyzEPJATFn4ve
5LSD3I5qIDBffGDk39lAjsPyv28HFtNmQPqRcZSIHu4d3BYvlnLRaaIdqQuArl/NqcNXVmtY
sY45ezC4MOeAP0PvbvmL97xgkDFjY5IjQ2fMSj6BPx7XXbJ/O5TGSxavZUWPKAs1HwIBEQ==
[end]

[privkey perf-test-id]
xReoEfvdTW8awd4kNC4OUxuN9PlI3prLaupef+2lT5ve77vxqdKoe0EZkT+zXBCxEOykWaez
7GTFHbTSWNr5N8r7RJ81kHkXVVoMIMuv6DK1lMNXXttsvpXBZR/M7H0UtJnylN/2PIqLkBlF
TkXTKc0bd1enSbobbZBIsWFKENpUYQZsb7EmslPIVZiqWAXGAzHuIN49+WrrCssm1jzb5fEk
IBF7k7tMnQ6EFlSzT9CdLDuafdDX9Lrpb4jB8qPL1VVx5uP/ENHqVgOvqKHRBSk0DiTfY8fs
5n19cSQYDUDkc5N3AF7GRXD38tJEixpnSkMoRpiucL5NZ59vjgsO567B8V07Trr5D0ZXoUXz
m2KZWyVrP/Qgq3aog+hRuJfNLfqDiVvCtm3ypaXV1mik7NBdKfe01TogzK3jx/XIiCxyH7Qn
vq2UXLYjJ1QAOQrYY6YNtcouZvSO04B5H7V92uT19bs/yp7fZBk2LmqIMGs+Gbk0Yk9RS1gC
N+YHZKsnO9YkqmlLFx8rCIvctGWo+iJhoTjl2aYIQ6nyL0HC3iM+kskzmzXj8HkJaL9edSqj
iipd/ct8dR6058ntOJ9QvWnewS65lW1DjIfqPGDUyUWYHhJHNPe8auqredbKbLw7+FiahD46
kUeKnALMLo5LUHDbFMWI+ezFk0Yr02WRhNFHTIP8ETrxlXlXWYnFL0h1yiuQtf0925N5pBh0
5Ez+/JS6mkAxxoph9zqvrciXdub7LnBHTzW1SKv7lgxYiqF1fjd+pyP7ayJH59ZvckOPArpq
vwTMSutpiF65z7aI+9hfSSAip4ySR1QQ/jFychhwwK/1B6aDR4lK
[end]
EOF
    cd ..
    mv dbdir-new dbdir
fi

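# Loaders: each load_* function fills the test working copy with one kind
# of data set built above in the staging directory.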
load_zero_small() {
    dd if=/dev/zero of=zero.small bs=1k count=10 >/dev/null 2>&1
}

load_zero_large() {
    dd if=/dev/zero of=zero.large bs=1024k count=100 >/dev/null 2>&1
}

load_random_medium() {
    cp -rp ../staging/random.medium.00 .
}

load_random_medium_20() {
    cp -rp ../staging/random.medium.[01]? .
}

load_halfzero_large() {
    cp -rp ../staging/halfzero.large .
}

load_random_large() {
    cp -rp ../staging/random.large .
}

load_monotone() {
    cp -rp ../staging/monotone/mt-0.19 .
}

load_mt_multiple() {
    cp -rp ../staging/monotone .
}

load_mt_bigfiles() {
    for i in ../staging/monotone/mt-*; do
        j=`basename $i`
        find $i -type f -exec cat '{}' \; >$j.txt
    done
}

load_mixed() {
    if [ "$1" = "" ]; then
        echo "Usage: load_mixed <count>"
        exit 1
    fi
    RANDOM_LIST=(`ls ../staging/random.medium.??`)
    MONOTONE_LIST=(`ls -d ../staging/monotone/mt-*`)
    dd if=/dev/zero of=zero.med bs=1024k count=10 >/dev/null 2>&1
    i=0;
    j=0;
    while [ $i -lt $1 ]; do
        MT_PATH=${MONOTONE_LIST[$i]}
        MT_NAME=`basename $MT_PATH`
        [ -d $MT_NAME ] || cp -rp $MT_PATH .
        cp ${RANDOM_LIST[$j]} $MT_NAME
        cat zero.med >>$MT_NAME/`basename ${RANDOM_LIST[$j]}`
        j=`expr $j + 1`
        cp ${RANDOM_LIST[$j]} $MT_NAME
        cat zero.med >>$MT_NAME/`basename ${RANDOM_LIST[$j]}`
        j=`expr $j + 1`
        i=`expr $i + 1`
    done
}

load_mixed_1() {
    load_mixed 1
}

load_mixed_4() {
    load_mixed 4
}

load_mixed_12() {
    load_mixed 12
}

load_everything() {
    # Print progress here, even though the dots then have to be removed
    # from the output, because this loader takes so long.
    echo -n "load everything..."
    load_zero_small
    echo -n "."
    load_zero_large
    echo -n "."
    load_random_medium_20
    echo -n "."
    load_halfzero_large
    echo -n "."
    load_random_large
    echo -n "."
    load_mt_multiple
    echo -n "."
    load_mt_bigfiles
    echo -n "."
    load_mixed_12
    echo "."
}

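# prep_test: recreate empty checkin and netsync databases, import the test
# key, and set up a fresh working copy in testdir.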
prep_test() {
    cd /tmp/mt-perf-test/dbdir
    [ ! -f checkin.db ] || rm checkin.db
    [ ! -f netsync.db ] || rm netsync.db
    [ ! -f checkin.db-journal ] || rm checkin.db-journal
    [ ! -f netsync.db-journal ] || rm netsync.db-journal

    $MONOTONE --db checkin.db db init >/tmp/mt-perf-test/log 2>&1
    $MONOTONE --db netsync.db db init >>/tmp/mt-perf-test/log 2>&1
    cat keys | $MONOTONE --db checkin.db read >>/tmp/mt-perf-test/log 2>&1
    cat keys | $MONOTONE --db netsync.db read >>/tmp/mt-perf-test/log 2>&1

    cd /tmp/mt-perf-test
    [ ! -d testdir ] || rm -rf testdir
    $MONOTONE --db dbdir/checkin.db setup --branch test testdir >>/tmp/mt-perf-test/log 2>&1
    cd testdir
}

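# dotest <loader>: populate a fresh working copy with the named loader,
# then time add, commit, checkout, serve and pull, feeding each log
# through the accounting parser.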
dotest() {
    prep_test
    load_$1
    export ENABLE_MONOTONE_STATISTICS=1
    $MEASURE $MONOTONE add . >/tmp/mt-perf-test/add.log 2>&1
    $PARSE_ACCOUNT "$1" "add files" /tmp/mt-perf-test/add.log

    cp ../dbdir/monotonerc MT/monotonerc
    $MEASURE $MONOTONE commit -m foo >/tmp/mt-perf-test/commit.log 2>&1
    $PARSE_ACCOUNT "$1" "commit" /tmp/mt-perf-test/commit.log

    cd ..
    rm -rf testdir
    $MEASURE $MONOTONE --db dbdir/checkin.db checkout --branch test testdir >/tmp/mt-perf-test/checkout.log 2>&1
    $PARSE_ACCOUNT "$1" "checkout" /tmp/mt-perf-test/checkout.log

    cd testdir
    cp ../dbdir/monotonerc MT/monotonerc
    [ -f /tmp/mt-perf-test/pid-file ] && rm /tmp/mt-perf-test/pid-file
    $MEASURE $MONOTONE --db ../dbdir/checkin.db $PIDFILE_ARG serve localhost:7318 test >/tmp/mt-perf-test/serve.log 2>&1 &
    SERVER=$!
    sleep 1
    $MEASURE $MONOTONE --db ../dbdir/netsync.db pull localhost:7318 test >/tmp/mt-perf-test/pull.log 2>&1
    # The SEGV here is intentional: it makes the server exit through an
    # assertion, which prints out the accounting information.
    case $KILLBY in
        child) kill -SEGV $SERVER ;;
        file)  kill -SEGV `cat /tmp/mt-perf-test/pid-file` ;;
        *)     echo "internal error, unknown killby '$KILLBY'" ;;
    esac
    wait $SERVER || true
    $PARSE_ACCOUNT "$1" "serve" /tmp/mt-perf-test/serve.log
    $PARSE_ACCOUNT "$1" "pull" /tmp/mt-perf-test/pull.log
    echo
    unset ENABLE_MONOTONE_STATISTICS
}

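# Print a header describing the test machine and binary, then run either
# the tests named on the command line or the full suite.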
echo -n "Test CPU: "
grep 'model name' /proc/cpuinfo | sed 's/model name.: //' | head -1
$MONOTONE --version 2>&1 | grep 'base revision'
ENABLE_MONOTONE_STATISTICS=1 $MEASURE $MONOTONE --help >/tmp/mt-perf-test/help.log 2>&1
$PARSE_ACCOUNT header header /tmp/mt-perf-test/help.log
if [ "$2" != "" ]; then
    shift
    while [ "$1" != "" ]; do
        dotest "$1"
        shift
    done
    exit 0
fi
dotest zero_small
dotest zero_large
dotest random_medium
dotest random_medium_20
dotest halfzero_large
dotest random_large
dotest monotone
dotest mt_multiple
dotest mt_bigfiles
dotest mixed_1
dotest mixed_4
dotest mixed_12
dotest everything
