#!/bin/bash
# usage: benchbulk.sh dbname

BULKSIZE=1000
DOCSIZE=100
INSERTS=20

DBURL="http://localhost:5984/$1"
POSTURL="$DBURL/_bulk_docs"

# Emit a _bulk_docs JSON body containing $1 docs, with zero-padded
# _ids starting at $2 and a body of $3 zero characters each.
function make_bulk_docs() {
  ROW=0
  SIZE=$(($1 - 1))
  START=$2
  BODYSIZE=$3

  BODY=$(printf "%0${BODYSIZE}d")

  echo '{"docs":['
  while [ $ROW -lt $SIZE ]; do
    printf '{"_id":"%020d", "body":"'$BODY'"},' $(($ROW + $START))
    let ROW=ROW+1
  done
  printf '{"_id":"%020d", "body":"'$BODY'"}' $(($ROW + $START))
  echo ']}'
}

echo "Making $INSERTS bulk inserts of $BULKSIZE docs each"

echo "Attempt to delete db at $DBURL"
curl -X DELETE "$DBURL" -w\\n

echo "Attempt to create db at $DBURL"
curl -X PUT "$DBURL" -w\\n

echo "Post to $POSTURL $INSERTS times"
POSTS=0
while [ $POSTS -lt $INSERTS ]; do
  STARTKEY=$((POSTS * BULKSIZE))
  echo "startkey $STARTKEY bulksize $BULKSIZE"
  # switch the comments on the following two lines to see that it's bash,
  # not CouchDB, that is the slow part
  # time echo $(make_bulk_docs $BULKSIZE $STARTKEY $DOCSIZE)
  # note: CouchDB 2.x and later require an explicit JSON Content-Type on _bulk_docs
  echo $(make_bulk_docs $BULKSIZE $STARTKEY $DOCSIZE) | \
    curl -T - -X POST "$POSTURL" -H 'Content-Type: application/json' \
      -w%{http_code}\ %{time_total}\ sec\\n -o out.file 2> /dev/null &
  let POSTS=POSTS+1
done

# wait for all backgrounded curl POSTs to finish, then print the db info
# document so you can check doc_count
wait

curl "$DBURL" -w\\n
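
A minimal way to try it, as a sketch: the database name bulktest is a placeholder, and the script assumes an unauthenticated CouchDB listening on localhost:5984 (as hard-coded in DBURL above).

  chmod +x benchbulk.sh
  time ./benchbulk.sh bulktest
  # each bulk POST prints a line like "201 0.107 sec" (HTTP status and
  # curl's time_total); response bodies go to out.file, and the final
  # curl prints the db info document, whose doc_count should equal
  # INSERTS * BULKSIZE (20000 with the settings above)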