View Javadoc

1   /**
2    *
3    * Licensed to the Apache Software Foundation (ASF) under one
4    * or more contributor license agreements.  See the NOTICE file
5    * distributed with this work for additional information
6    * regarding copyright ownership.  The ASF licenses this file
7    * to you under the Apache License, Version 2.0 (the
8    * "License"); you may not use this file except in compliance
9    * with the License.  You may obtain a copy of the License at
10   *
11   *     http://www.apache.org/licenses/LICENSE-2.0
12   *
13   * Unless required by applicable law or agreed to in writing, software
14   * distributed under the License is distributed on an "AS IS" BASIS,
15   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16   * See the License for the specific language governing permissions and
17   * limitations under the License.
18   */
19  package org.apache.hadoop.hbase.rest;
20  
21  import java.io.DataInput;
22  import java.io.DataOutput;
23  import java.io.IOException;
24  import java.io.PrintStream;
25  import java.lang.reflect.Constructor;
26  import java.text.SimpleDateFormat;
27  import java.util.ArrayList;
28  import java.util.Arrays;
29  import java.util.Date;
30  import java.util.List;
31  import java.util.Map;
32  import java.util.Random;
33  import java.util.TreeMap;
34  import java.util.regex.Matcher;
35  import java.util.regex.Pattern;
36  
37  import org.apache.commons.logging.Log;
38  import org.apache.commons.logging.LogFactory;
39  import org.apache.hadoop.conf.Configuration;
40  import org.apache.hadoop.conf.Configured;
41  import org.apache.hadoop.fs.FSDataInputStream;
42  import org.apache.hadoop.fs.FileStatus;
43  import org.apache.hadoop.fs.FileSystem;
44  import org.apache.hadoop.fs.Path;
45  import org.apache.hadoop.hbase.HBaseConfiguration;
46  import org.apache.hadoop.hbase.HColumnDescriptor;
47  import org.apache.hadoop.hbase.HConstants;
48  import org.apache.hadoop.hbase.HTableDescriptor;
49  import org.apache.hadoop.hbase.KeyValue;
50  import org.apache.hadoop.hbase.TableName;
51  import org.apache.hadoop.hbase.Tag;
52  import org.apache.hadoop.hbase.client.BufferedMutator;
53  import org.apache.hadoop.hbase.client.Connection;
54  import org.apache.hadoop.hbase.client.ConnectionFactory;
55  import org.apache.hadoop.hbase.client.Durability;
56  import org.apache.hadoop.hbase.client.Get;
57  import org.apache.hadoop.hbase.client.Put;
58  import org.apache.hadoop.hbase.client.Result;
59  import org.apache.hadoop.hbase.client.ResultScanner;
60  import org.apache.hadoop.hbase.client.Scan;
61  import org.apache.hadoop.hbase.client.Table;
62  import org.apache.hadoop.hbase.filter.BinaryComparator;
63  import org.apache.hadoop.hbase.filter.CompareFilter;
64  import org.apache.hadoop.hbase.filter.Filter;
65  import org.apache.hadoop.hbase.filter.PageFilter;
66  import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
67  import org.apache.hadoop.hbase.filter.WhileMatchFilter;
68  import org.apache.hadoop.hbase.io.compress.Compression;
69  import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
70  import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
71  import org.apache.hadoop.hbase.rest.client.Client;
72  import org.apache.hadoop.hbase.rest.client.Cluster;
73  import org.apache.hadoop.hbase.rest.client.RemoteAdmin;
74  import org.apache.hadoop.hbase.util.Bytes;
75  import org.apache.hadoop.hbase.util.Hash;
76  import org.apache.hadoop.hbase.util.MurmurHash;
77  import org.apache.hadoop.hbase.util.Pair;
78  
79  import org.apache.hadoop.io.LongWritable;
80  import org.apache.hadoop.io.NullWritable;
81  import org.apache.hadoop.io.Text;
82  import org.apache.hadoop.io.Writable;
83  import org.apache.hadoop.mapreduce.InputSplit;
84  import org.apache.hadoop.mapreduce.Job;
85  import org.apache.hadoop.mapreduce.JobContext;
86  import org.apache.hadoop.mapreduce.Mapper;
87  import org.apache.hadoop.mapreduce.RecordReader;
88  import org.apache.hadoop.mapreduce.TaskAttemptContext;
89  import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
90  import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
91  import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
92  import org.apache.hadoop.util.LineReader;
93  import org.apache.hadoop.util.Tool;
94  import org.apache.hadoop.util.ToolRunner;
95  
96  /**
97   * Script used evaluating Stargate performance and scalability.  Runs a SG
98   * client that steps through one of a set of hardcoded tests or 'experiments'
99   * (e.g. a random reads test, a random writes test, etc.). Pass on the
100  * command-line which test to run and how many clients are participating in
101  * this experiment. Run <code>java PerformanceEvaluation --help</code> to
102  * obtain usage.
103  *
104  * <p>This class sets up and runs the evaluation programs described in
105  * Section 7, <i>Performance Evaluation</i>, of the <a
106  * href="http://labs.google.com/papers/bigtable.html">Bigtable</a>
107  * paper, pages 8-10.
108  *
109  * <p>If number of clients > 1, we start up a MapReduce job. Each map task
110  * runs an individual client. Each client does about 1GB of data.
111  */
112 public class PerformanceEvaluation extends Configured implements Tool {
  protected static final Log LOG = LogFactory.getLog(PerformanceEvaluation.class.getName());

  // Width of the zero-padded row-key prefix; presumably consumed by the
  // row-key format() helper — TODO confirm against that method.
  private static final int DEFAULT_ROW_PREFIX_LENGTH = 16;
  // Bytes of value data written per row.
  private static final int ROW_LENGTH = 1000;
  // Length in bytes of each generated tag value — assumed, confirm in tag setup.
  private static final int TAG_LENGTH = 256;
  // NOTE: 1024 * 1024 * 1000 = 1000 MiB, not exactly one GiB.
  private static final int ONE_GB = 1024 * 1024 * 1000;
  // Default row count so each client writes roughly 1GB (see class javadoc).
  private static final int ROWS_PER_GB = ONE_GB / ROW_LENGTH;

  public static final TableName TABLE_NAME = TableName.valueOf("TestTable");
  public static final byte[] FAMILY_NAME = Bytes.toBytes("info");
  public static final byte[] QUALIFIER_NAME = Bytes.toBytes("data");
  // Table targeted by this run; defaults to TABLE_NAME but may be overridden
  // (e.g. by the MapReduce map task from its input split).
  private TableName tableName = TABLE_NAME;

  // Lazily built by getTableDescriptor(); cached after first call.
  protected HTableDescriptor TABLE_DESCRIPTOR;
  // Registered test commands keyed by command name (TreeMap keeps help output sorted).
  protected Map<String, CmdDescriptor> commands = new TreeMap<>();
  // REST (Stargate) cluster definition shared by all clients.
  protected static Cluster cluster = new Cluster();

  volatile Configuration conf;
  // When true, run the N clients as threads in this JVM instead of a MapReduce job.
  private boolean nomapred = false;
  // Number of concurrent clients.
  private int N = 1;
  // Total number of rows across all clients.
  private int R = ROWS_PER_GB;
  private Compression.Algorithm compression = Compression.Algorithm.NONE;
  private DataBlockEncoding blockEncoding = DataBlockEncoding.NONE;
  private boolean flushCommits = true;
  private boolean writeToWAL = true;
  // When true, the column family is flagged in-memory in the table descriptor.
  private boolean inMemoryCF = false;
  // Number of regions to presplit the table into; 0 disables presplitting.
  private int presplitRegions = 0;
  private boolean useTags = false;
  private int noOfTags = 1;
  private Connection connection;

  // Root dir (relative to the FS working directory) for MapReduce inputs/outputs.
  private static final Path PERF_EVAL_DIR = new Path("performance_evaluation");

  /**
   * Regex to parse lines in input file passed to mapreduce task.
   * Group order must match the field order written by writeInputFile and
   * consumed by PeInputFormat.getSplits.
   */
  public static final Pattern LINE_PATTERN =
      Pattern.compile("tableName=(\\w+),\\s+" +
          "startRow=(\\d+),\\s+" +
          "perClientRunRows=(\\d+),\\s+" +
          "totalRows=(\\d+),\\s+" +
          "clients=(\\d+),\\s+" +
          "flushCommits=(\\w+),\\s+" +
          "writeToWAL=(\\w+),\\s+" +
          "useTags=(\\w+),\\s+" +
          "noOfTags=(\\d+)");

  /**
   * Enum for map metrics.  Keep it out here rather than inside in the Map
   * inner-class so we can find associated properties.
   */
  protected enum Counter {
    /** elapsed time */
    ELAPSED_TIME,
    /** number of rows */
    ROWS
  }
170 
171   /**
172    * Constructor
173    * @param c Configuration object
174    */
175   public PerformanceEvaluation(final Configuration c) {
176     this.conf = c;
177 
178     addCommandDescriptor(RandomReadTest.class, "randomRead",
179         "Run random read test");
180     addCommandDescriptor(RandomSeekScanTest.class, "randomSeekScan",
181         "Run random seek and scan 100 test");
182     addCommandDescriptor(RandomScanWithRange10Test.class, "scanRange10",
183         "Run random seek scan with both start and stop row (max 10 rows)");
184     addCommandDescriptor(RandomScanWithRange100Test.class, "scanRange100",
185         "Run random seek scan with both start and stop row (max 100 rows)");
186     addCommandDescriptor(RandomScanWithRange1000Test.class, "scanRange1000",
187         "Run random seek scan with both start and stop row (max 1000 rows)");
188     addCommandDescriptor(RandomScanWithRange10000Test.class, "scanRange10000",
189         "Run random seek scan with both start and stop row (max 10000 rows)");
190     addCommandDescriptor(RandomWriteTest.class, "randomWrite",
191         "Run random write test");
192     addCommandDescriptor(SequentialReadTest.class, "sequentialRead",
193         "Run sequential read test");
194     addCommandDescriptor(SequentialWriteTest.class, "sequentialWrite",
195         "Run sequential write test");
196     addCommandDescriptor(ScanTest.class, "scan",
197         "Run scan test (read every row)");
198     addCommandDescriptor(FilteredScanTest.class, "filterScan",
199         "Run scan test using a filter to find a specific row based " +
200         "on it's value (make sure to use --rows=20)");
201   }
202 
203   protected void addCommandDescriptor(Class<? extends Test> cmdClass,
204       String name, String description) {
205     CmdDescriptor cmdDescriptor = new CmdDescriptor(cmdClass, name, description);
206     commands.put(name, cmdDescriptor);
207   }
208 
  /**
   * Implementations can have their status set.  Used to report client
   * progress, e.g. to the MapReduce task status or to the log.
   */
  interface Status {
    /**
     * Sets status
     * @param msg status message
     * @throws IOException if setting the status fails
     */
    void setStatus(final String msg) throws IOException;
  }
220 
221   /**
222    *  This class works as the InputSplit of Performance Evaluation
223    *  MapReduce InputFormat, and the Record Value of RecordReader.
224    *  Each map task will only read one record from a PeInputSplit,
225    *  the record value is the PeInputSplit itself.
226    */
227   public static class PeInputSplit extends InputSplit implements Writable {
228     private TableName tableName;
229     private int startRow;
230     private int rows;
231     private int totalRows;
232     private int clients;
233     private boolean flushCommits;
234     private boolean writeToWAL;
235     private boolean useTags;
236     private int noOfTags;
237 
238     public PeInputSplit(TableName tableName, int startRow, int rows, int totalRows, int clients,
239         boolean flushCommits, boolean writeToWAL, boolean useTags, int noOfTags) {
240       this.tableName = tableName;
241       this.startRow = startRow;
242       this.rows = rows;
243       this.totalRows = totalRows;
244       this.clients = clients;
245       this.flushCommits = flushCommits;
246       this.writeToWAL = writeToWAL;
247       this.useTags = useTags;
248       this.noOfTags = noOfTags;
249     }
250 
251     @Override
252     public void readFields(DataInput in) throws IOException {
253       int tableNameLen = in.readInt();
254       byte[] name = new byte[tableNameLen];
255       in.readFully(name);
256       this.tableName = TableName.valueOf(name);
257       this.startRow = in.readInt();
258       this.rows = in.readInt();
259       this.totalRows = in.readInt();
260       this.clients = in.readInt();
261       this.flushCommits = in.readBoolean();
262       this.writeToWAL = in.readBoolean();
263       this.useTags = in.readBoolean();
264       this.noOfTags = in.readInt();
265     }
266 
267     @Override
268     public void write(DataOutput out) throws IOException {
269       byte[] name = this.tableName.toBytes();
270       out.writeInt(name.length);
271       out.write(name);
272       out.writeInt(startRow);
273       out.writeInt(rows);
274       out.writeInt(totalRows);
275       out.writeInt(clients);
276       out.writeBoolean(flushCommits);
277       out.writeBoolean(writeToWAL);
278       out.writeBoolean(useTags);
279       out.writeInt(noOfTags);
280     }
281 
282     @Override
283     public long getLength() {
284       return 0;
285     }
286 
287     @Override
288     public String[] getLocations() {
289       return new String[0];
290     }
291 
292     public int getStartRow() {
293       return startRow;
294     }
295 
296     public TableName getTableName() {
297       return tableName;
298     }
299 
300     public int getRows() {
301       return rows;
302     }
303 
304     public int getTotalRows() {
305       return totalRows;
306     }
307 
308     public boolean isFlushCommits() {
309       return flushCommits;
310     }
311 
312     public boolean isWriteToWAL() {
313       return writeToWAL;
314     }
315 
316     public boolean isUseTags() {
317       return useTags;
318     }
319 
320     public int getNoOfTags() {
321       return noOfTags;
322     }
323   }
324 
325   /**
326    *  InputFormat of Performance Evaluation MapReduce job.
327    *  It extends from FileInputFormat, want to use it's methods such as setInputPaths().
328    */
329   public static class PeInputFormat extends FileInputFormat<NullWritable, PeInputSplit> {
330     @Override
331     public List<InputSplit> getSplits(JobContext job) throws IOException {
332       // generate splits
333       List<InputSplit> splitList = new ArrayList<>();
334 
335       for (FileStatus file : listStatus(job)) {
336         if (file.isDirectory()) {
337           continue;
338         }
339         Path path = file.getPath();
340         FileSystem fs = path.getFileSystem(job.getConfiguration());
341         FSDataInputStream fileIn = fs.open(path);
342         LineReader in = new LineReader(fileIn, job.getConfiguration());
343         int lineLen;
344         while (true) {
345           Text lineText = new Text();
346           lineLen = in.readLine(lineText);
347           if (lineLen <= 0) {
348             break;
349           }
350           Matcher m = LINE_PATTERN.matcher(lineText.toString());
351           if ((m != null) && m.matches()) {
352             TableName tableName = TableName.valueOf(m.group(1));
353             int startRow = Integer.parseInt(m.group(2));
354             int rows = Integer.parseInt(m.group(3));
355             int totalRows = Integer.parseInt(m.group(4));
356             int clients = Integer.parseInt(m.group(5));
357             boolean flushCommits = Boolean.parseBoolean(m.group(6));
358             boolean writeToWAL = Boolean.parseBoolean(m.group(7));
359             boolean useTags = Boolean.parseBoolean(m.group(8));
360             int noOfTags = Integer.parseInt(m.group(9));
361 
362             LOG.debug("tableName=" + tableName +
363                       " split["+ splitList.size() + "] " +
364                       " startRow=" + startRow +
365                       " rows=" + rows +
366                       " totalRows=" + totalRows +
367                       " clients=" + clients +
368                       " flushCommits=" + flushCommits +
369                       " writeToWAL=" + writeToWAL +
370                       " useTags=" + useTags +
371                       " noOfTags=" + noOfTags);
372 
373             PeInputSplit newSplit =
374               new PeInputSplit(tableName, startRow, rows, totalRows, clients,
375                   flushCommits, writeToWAL, useTags, noOfTags);
376             splitList.add(newSplit);
377           }
378         }
379         in.close();
380       }
381 
382       LOG.info("Total # of splits: " + splitList.size());
383       return splitList;
384     }
385 
386     @Override
387     public RecordReader<NullWritable, PeInputSplit> createRecordReader(InputSplit split,
388         TaskAttemptContext context) {
389       return new PeRecordReader();
390     }
391 
392     public static class PeRecordReader extends RecordReader<NullWritable, PeInputSplit> {
393       private boolean readOver = false;
394       private PeInputSplit split = null;
395       private NullWritable key = null;
396       private PeInputSplit value = null;
397 
398       @Override
399       public void initialize(InputSplit split, TaskAttemptContext context) {
400         this.readOver = false;
401         this.split = (PeInputSplit)split;
402       }
403 
404       @Override
405       public boolean nextKeyValue() {
406         if (readOver) {
407           return false;
408         }
409 
410         key = NullWritable.get();
411         value = split;
412 
413         readOver = true;
414         return true;
415       }
416 
417       @Override
418       public NullWritable getCurrentKey() {
419         return key;
420       }
421 
422       @Override
423       public PeInputSplit getCurrentValue() {
424         return value;
425       }
426 
427       @Override
428       public float getProgress() {
429         if (readOver) {
430           return 1.0f;
431         } else {
432           return 0.0f;
433         }
434       }
435 
436       @Override
437       public void close() {
438         // do nothing
439       }
440     }
441   }
442 
443   /**
444    * MapReduce job that runs a performance evaluation client in each map task.
445    */
446   public static class EvaluationMapTask
447       extends Mapper<NullWritable, PeInputSplit, LongWritable, LongWritable> {
448 
449     /** configuration parameter name that contains the command */
450     public final static String CMD_KEY = "EvaluationMapTask.command";
451     /** configuration parameter name that contains the PE impl */
452     public static final String PE_KEY = "EvaluationMapTask.performanceEvalImpl";
453 
454     private Class<? extends Test> cmd;
455     private PerformanceEvaluation pe;
456 
457     @Override
458     protected void setup(Context context) {
459       this.cmd = forName(context.getConfiguration().get(CMD_KEY), Test.class);
460 
461       // this is required so that extensions of PE are instantiated within the
462       // map reduce task...
463       Class<? extends PerformanceEvaluation> peClass =
464           forName(context.getConfiguration().get(PE_KEY), PerformanceEvaluation.class);
465       try {
466         this.pe = peClass.getConstructor(Configuration.class)
467             .newInstance(context.getConfiguration());
468       } catch (Exception e) {
469         throw new IllegalStateException("Could not instantiate PE instance", e);
470       }
471     }
472 
473     private <Type> Class<? extends Type> forName(String className, Class<Type> type) {
474       Class<? extends Type> clazz;
475       try {
476         clazz = Class.forName(className).asSubclass(type);
477       } catch (ClassNotFoundException e) {
478         throw new IllegalStateException("Could not find class for name: " + className, e);
479       }
480       return clazz;
481     }
482 
483     @Override
484     protected void map(NullWritable key, PeInputSplit value, final Context context)
485         throws IOException, InterruptedException {
486       Status status = new Status() {
487         @Override
488         public void setStatus(String msg) {
489           context.setStatus(msg);
490         }
491       };
492 
493       // Evaluation task
494       pe.tableName = value.getTableName();
495       long elapsedTime = this.pe.runOneClient(this.cmd, value.getStartRow(),
496         value.getRows(), value.getTotalRows(),
497         value.isFlushCommits(), value.isWriteToWAL(),
498         value.isUseTags(), value.getNoOfTags(),
499         ConnectionFactory.createConnection(context.getConfiguration()), status);
500       // Collect how much time the thing took. Report as map output and
501       // to the ELAPSED_TIME counter.
502       context.getCounter(Counter.ELAPSED_TIME).increment(elapsedTime);
503       context.getCounter(Counter.ROWS).increment(value.rows);
504       context.write(new LongWritable(value.startRow), new LongWritable(elapsedTime));
505       context.progress();
506     }
507   }
508 
509   /**
510    * If table does not already exist, create.
511    * @param admin Client to use checking.
512    * @return True if we created the table.
513    * @throws IOException if an operation on the table fails
514    */
515   private boolean checkTable(RemoteAdmin admin) throws IOException {
516     HTableDescriptor tableDescriptor = getTableDescriptor();
517     if (this.presplitRegions > 0) {
518       // presplit requested
519       if (admin.isTableAvailable(tableDescriptor.getTableName().getName())) {
520         admin.deleteTable(tableDescriptor.getTableName().getName());
521       }
522 
523       byte[][] splits = getSplits();
524       for (int i=0; i < splits.length; i++) {
525         LOG.debug(" split " + i + ": " + Bytes.toStringBinary(splits[i]));
526       }
527       admin.createTable(tableDescriptor);
528       LOG.info("Table created with " + this.presplitRegions + " splits");
529     } else {
530       boolean tableExists = admin.isTableAvailable(tableDescriptor.getTableName().getName());
531       if (!tableExists) {
532         admin.createTable(tableDescriptor);
533         LOG.info("Table " + tableDescriptor + " created");
534       }
535     }
536 
537     return admin.isTableAvailable(tableDescriptor.getTableName().getName());
538   }
539 
540   protected HTableDescriptor getTableDescriptor() {
541     if (TABLE_DESCRIPTOR == null) {
542       TABLE_DESCRIPTOR = new HTableDescriptor(tableName);
543       HColumnDescriptor family = new HColumnDescriptor(FAMILY_NAME);
544       family.setDataBlockEncoding(blockEncoding);
545       family.setCompressionType(compression);
546       if (inMemoryCF) {
547         family.setInMemory(true);
548       }
549       TABLE_DESCRIPTOR.addFamily(family);
550     }
551     return TABLE_DESCRIPTOR;
552   }
553 
554   /**
555    * Generates splits based on total number of rows and specified split regions
556    *
557    * @return splits : array of byte []
558    */
559   protected  byte[][] getSplits() {
560     if (this.presplitRegions == 0) {
561       return new byte[0][];
562     }
563 
564     int numSplitPoints = presplitRegions - 1;
565     byte[][] splits = new byte[numSplitPoints][];
566     int jump = this.R  / this.presplitRegions;
567     for (int i = 0; i < numSplitPoints; i++) {
568       int rowkey = jump * (1 + i);
569       splits[i] = format(rowkey);
570     }
571     return splits;
572   }
573 
574   /**
575    * We're to run multiple clients concurrently.  Setup a mapreduce job.  Run
576    * one map per client.  Then run a single reduce to sum the elapsed times.
577    * @param cmd Command to run.
578    */
579   private void runNIsMoreThanOne(final Class<? extends Test> cmd)
580       throws IOException, InterruptedException, ClassNotFoundException {
581     RemoteAdmin remoteAdmin = new RemoteAdmin(new Client(cluster), getConf());
582     checkTable(remoteAdmin);
583     if (nomapred) {
584       doMultipleClients(cmd);
585     } else {
586       doMapReduce(cmd);
587     }
588   }
589 
590   /**
591    * Run all clients in this vm each to its own thread.
592    * @param cmd Command to run
593    * @throws IOException if creating a connection fails
594    */
595   private void doMultipleClients(final Class<? extends Test> cmd) throws IOException {
596     final List<Thread> threads = new ArrayList<>(this.N);
597     final long[] timings = new long[this.N];
598     final int perClientRows = R/N;
599     final TableName tableName = this.tableName;
600     final DataBlockEncoding encoding = this.blockEncoding;
601     final boolean flushCommits = this.flushCommits;
602     final Compression.Algorithm compression = this.compression;
603     final boolean writeToWal = this.writeToWAL;
604     final int preSplitRegions = this.presplitRegions;
605     final boolean useTags = this.useTags;
606     final int numTags = this.noOfTags;
607     final Connection connection = ConnectionFactory.createConnection(getConf());
608     for (int i = 0; i < this.N; i++) {
609       final int index = i;
610       Thread t = new Thread("TestClient-" + i) {
611         @Override
612         public void run() {
613           super.run();
614           PerformanceEvaluation pe = new PerformanceEvaluation(getConf());
615           pe.tableName = tableName;
616           pe.blockEncoding = encoding;
617           pe.flushCommits = flushCommits;
618           pe.compression = compression;
619           pe.writeToWAL = writeToWal;
620           pe.presplitRegions = preSplitRegions;
621           pe.N = N;
622           pe.connection = connection;
623           pe.useTags = useTags;
624           pe.noOfTags = numTags;
625           try {
626             long elapsedTime = pe.runOneClient(cmd, index * perClientRows,
627                 perClientRows, R,
628                  flushCommits, writeToWAL, useTags, noOfTags, connection, new Status() {
629                    @Override
630                    public void setStatus(final String msg) {
631                      LOG.info("client-" + getName() + " " + msg);
632                    }
633                  });
634             timings[index] = elapsedTime;
635             LOG.info("Finished " + getName() + " in " + elapsedTime +
636               "ms writing " + perClientRows + " rows");
637           } catch (IOException e) {
638             throw new RuntimeException(e);
639           }
640         }
641       };
642       threads.add(t);
643     }
644     for (Thread t : threads) {
645       t.start();
646     }
647     for (Thread t : threads) {
648       while (t.isAlive()) {
649         try {
650           t.join();
651         } catch (InterruptedException e) {
652           LOG.debug("Interrupted, continuing" + e.toString());
653         }
654       }
655     }
656     final String test = cmd.getSimpleName();
657     LOG.info("[" + test + "] Summary of timings (ms): "
658              + Arrays.toString(timings));
659     Arrays.sort(timings);
660     long total = 0;
661     for (int i = 0; i < this.N; i++) {
662       total += timings[i];
663     }
664     LOG.info("[" + test + "]"
665              + "\tMin: " + timings[0] + "ms"
666              + "\tMax: " + timings[this.N - 1] + "ms"
667              + "\tAvg: " + (total / this.N) + "ms");
668   }
669 
670   /**
671    * Run a mapreduce job.  Run as many maps as asked-for clients.
672    * Before we start up the job, write out an input file with instruction
673    * per client regards which row they are to start on.
674    * @param cmd Command to run.
675    */
676   private void doMapReduce(final Class<? extends Test> cmd)
677       throws IOException, InterruptedException, ClassNotFoundException {
678     Configuration conf = getConf();
679     Path inputDir = writeInputFile(conf);
680     conf.set(EvaluationMapTask.CMD_KEY, cmd.getName());
681     conf.set(EvaluationMapTask.PE_KEY, getClass().getName());
682     Job job = Job.getInstance(conf);
683     job.setJarByClass(PerformanceEvaluation.class);
684     job.setJobName("HBase Performance Evaluation");
685 
686     job.setInputFormatClass(PeInputFormat.class);
687     PeInputFormat.setInputPaths(job, inputDir);
688 
689     job.setOutputKeyClass(LongWritable.class);
690     job.setOutputValueClass(LongWritable.class);
691 
692     job.setMapperClass(EvaluationMapTask.class);
693     job.setReducerClass(LongSumReducer.class);
694     job.setNumReduceTasks(1);
695 
696     job.setOutputFormatClass(TextOutputFormat.class);
697     TextOutputFormat.setOutputPath(job, new Path(inputDir.getParent(), "outputs"));
698     TableMapReduceUtil.addDependencyJars(job);
699     TableMapReduceUtil.initCredentials(job);
700     job.waitForCompletion(true);
701   }
702 
703   /**
704    * Write input file of offsets-per-client for the mapreduce job.
705    * @param c Configuration
706    * @return Directory that contains file written.
707    * @throws IOException if creating the directory or the file fails
708    */
709   private Path writeInputFile(final Configuration c) throws IOException {
710     SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMddHHmmss");
711     Path jobdir = new Path(PERF_EVAL_DIR, formatter.format(new Date()));
712     Path inputDir = new Path(jobdir, "inputs");
713 
714     FileSystem fs = FileSystem.get(c);
715     fs.mkdirs(inputDir);
716     Path inputFile = new Path(inputDir, "input.txt");
717     // Make input random.
718     Map<Integer, String> m = new TreeMap<>();
719     Hash h = MurmurHash.getInstance();
720     int perClientRows = (this.R / this.N);
721     try (PrintStream out = new PrintStream(fs.create(inputFile))) {
722       for (int i = 0; i < 10; i++) {
723         for (int j = 0; j < N; j++) {
724           StringBuilder s = new StringBuilder();
725           s.append("tableName=").append(tableName);
726           s.append(", startRow=").append((j * perClientRows) + (i * (perClientRows / 10)));
727           s.append(", perClientRunRows=").append(perClientRows / 10);
728           s.append(", totalRows=").append(R);
729           s.append(", clients=").append(N);
730           s.append(", flushCommits=").append(flushCommits);
731           s.append(", writeToWAL=").append(writeToWAL);
732           s.append(", useTags=").append(useTags);
733           s.append(", noOfTags=").append(noOfTags);
734           int hash = h.hash(Bytes.toBytes(s.toString()));
735           m.put(hash, s.toString());
736         }
737       }
738       for (Map.Entry<Integer, String> e : m.entrySet()) {
739         out.println(e.getValue());
740       }
741     }
742     return inputDir;
743   }
744 
745   /**
746    * Describes a command.
747    */
748   static class CmdDescriptor {
749     private Class<? extends Test> cmdClass;
750     private String name;
751     private String description;
752 
753     CmdDescriptor(Class<? extends Test> cmdClass, String name, String description) {
754       this.cmdClass = cmdClass;
755       this.name = name;
756       this.description = description;
757     }
758 
759     public Class<? extends Test> getCmdClass() {
760       return cmdClass;
761     }
762 
763     public String getName() {
764       return name;
765     }
766 
767     public String getDescription() {
768       return description;
769     }
770   }
771 
772   /**
773    * Wraps up options passed to {@link org.apache.hadoop.hbase.PerformanceEvaluation} tests
774    * This makes the reflection logic a little easier to understand...
775    */
776   static class TestOptions {
777     private int startRow;
778     private int perClientRunRows;
779     private int totalRows;
780     private TableName tableName;
781     private boolean flushCommits;
782     private boolean writeToWAL;
783     private boolean useTags;
784     private int noOfTags;
785     private Connection connection;
786 
787     TestOptions(int startRow, int perClientRunRows, int totalRows, TableName tableName,
788         boolean flushCommits, boolean writeToWAL, boolean useTags,
789         int noOfTags, Connection connection) {
790       this.startRow = startRow;
791       this.perClientRunRows = perClientRunRows;
792       this.totalRows = totalRows;
793       this.tableName = tableName;
794       this.flushCommits = flushCommits;
795       this.writeToWAL = writeToWAL;
796       this.useTags = useTags;
797       this.noOfTags = noOfTags;
798       this.connection = connection;
799     }
800 
801     public int getStartRow() {
802       return startRow;
803     }
804 
805     public int getPerClientRunRows() {
806       return perClientRunRows;
807     }
808 
809     public int getTotalRows() {
810       return totalRows;
811     }
812 
813     public TableName getTableName() {
814       return tableName;
815     }
816 
817     public boolean isFlushCommits() {
818       return flushCommits;
819     }
820 
821     public boolean isWriteToWAL() {
822       return writeToWAL;
823     }
824 
825     public Connection getConnection() {
826       return connection;
827     }
828 
829     public boolean isUseTags() {
830       return this.useTags;
831     }
832 
833     public int getNumTags() {
834       return this.noOfTags;
835     }
836   }
837 
838   /*
839    * A test.
840    * Subclass to particularize what happens per row.
841    */
842   static abstract class Test {
843     // Below is make it so when Tests are all running in the one
844     // jvm, that they each have a differently seeded Random.
845     private static final Random randomSeed =
846       new Random(System.currentTimeMillis());
847     private static long nextRandomSeed() {
848       return randomSeed.nextLong();
849     }
850     protected final Random rand = new Random(nextRandomSeed());
851 
852     protected final int startRow;
853     protected final int perClientRunRows;
854     protected final int totalRows;
855     private final Status status;
856     protected TableName tableName;
857     protected volatile Configuration conf;
858     protected boolean writeToWAL;
859     protected boolean useTags;
860     protected int noOfTags;
861     protected Connection connection;
862 
863     /**
864      * Note that all subclasses of this class must provide a public contructor
865      * that has the exact same list of arguments.
866      */
867     Test(final Configuration conf, final TestOptions options, final Status status) {
868       super();
869       this.startRow = options.getStartRow();
870       this.perClientRunRows = options.getPerClientRunRows();
871       this.totalRows = options.getTotalRows();
872       this.status = status;
873       this.tableName = options.getTableName();
874       this.conf = conf;
875       this.writeToWAL = options.isWriteToWAL();
876       this.useTags = options.isUseTags();
877       this.noOfTags = options.getNumTags();
878       this.connection = options.getConnection();
879     }
880 
881     protected String generateStatus(final int sr, final int i, final int lr) {
882       return sr + "/" + i + "/" + lr;
883     }
884 
885     protected int getReportingPeriod() {
886       int period = this.perClientRunRows / 10;
887       return period == 0? this.perClientRunRows: period;
888     }
889 
890     abstract void testTakedown()  throws IOException;
891 
892     /**
893      * Run test
894      * @return Elapsed time.
895      * @throws IOException if something in the test fails
896      */
897     long test() throws IOException {
898       testSetup();
899       LOG.info("Timed test starting in thread " + Thread.currentThread().getName());
900       final long startTime = System.nanoTime();
901       try {
902         testTimed();
903       } finally {
904         testTakedown();
905       }
906       return (System.nanoTime() - startTime) / 1000000;
907     }
908 
909     abstract void testSetup() throws IOException;
910 
911     /**
912      * Provides an extension point for tests that don't want a per row invocation.
913      */
914     void testTimed() throws IOException {
915       int lastRow = this.startRow + this.perClientRunRows;
916       // Report on completion of 1/10th of total.
917       for (int i = this.startRow; i < lastRow; i++) {
918         testRow(i);
919         if (status != null && i > 0 && (i % getReportingPeriod()) == 0) {
920           status.setStatus(generateStatus(this.startRow, i, lastRow));
921         }
922       }
923     }
924 
925     /**
926      * Test for individual row.
927      * @param i Row index.
928      */
929     abstract void testRow(final int i) throws IOException;
930   }
931 
932   static abstract class TableTest extends Test {
933     protected Table table;
934 
935     public TableTest(Configuration conf, TestOptions options, Status status) {
936       super(conf, options, status);
937     }
938 
939     @Override
940     void testSetup() throws IOException {
941       this.table = connection.getTable(tableName);
942     }
943 
944     @Override
945     void testTakedown() throws IOException {
946       table.close();
947     }
948   }
949 
950   static abstract class BufferedMutatorTest extends Test {
951     protected BufferedMutator mutator;
952     protected boolean flushCommits;
953 
954     public BufferedMutatorTest(Configuration conf, TestOptions options, Status status) {
955       super(conf, options, status);
956       this.flushCommits = options.isFlushCommits();
957     }
958 
959     @Override
960     void testSetup() throws IOException {
961       this.mutator = connection.getBufferedMutator(tableName);
962     }
963 
964     @Override
965     void testTakedown()  throws IOException {
966       if (flushCommits) {
967         this.mutator.flush();
968       }
969       mutator.close();
970     }
971   }
972 
973   static class RandomSeekScanTest extends TableTest {
974     RandomSeekScanTest(Configuration conf, TestOptions options, Status status) {
975       super(conf, options, status);
976     }
977 
978     @Override
979     void testRow(final int i) throws IOException {
980       Scan scan = new Scan(getRandomRow(this.rand, this.totalRows));
981       scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
982       scan.setFilter(new WhileMatchFilter(new PageFilter(120)));
983       ResultScanner s = this.table.getScanner(scan);
984       s.close();
985     }
986 
987     @Override
988     protected int getReportingPeriod() {
989       int period = this.perClientRunRows / 100;
990       return period == 0? this.perClientRunRows: period;
991     }
992   }
993 
994   @SuppressWarnings("unused")
995   static abstract class RandomScanWithRangeTest extends TableTest {
996     RandomScanWithRangeTest(Configuration conf, TestOptions options, Status status) {
997       super(conf, options, status);
998     }
999 
1000     @Override
1001     void testRow(final int i) throws IOException {
1002       Pair<byte[], byte[]> startAndStopRow = getStartAndStopRow();
1003       Scan scan = new Scan(startAndStopRow.getFirst(), startAndStopRow.getSecond());
1004       scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
1005       ResultScanner s = this.table.getScanner(scan);
1006       int count = 0;
1007       for (Result rr = null; (rr = s.next()) != null;) {
1008         count++;
1009       }
1010 
1011       if (i % 100 == 0) {
1012         LOG.info(String.format("Scan for key range %s - %s returned %s rows",
1013             Bytes.toString(startAndStopRow.getFirst()),
1014             Bytes.toString(startAndStopRow.getSecond()), count));
1015       }
1016 
1017       s.close();
1018     }
1019 
1020     protected abstract Pair<byte[], byte[]> getStartAndStopRow();
1021 
1022     protected Pair<byte[], byte[]> generateStartAndStopRows(int maxRange) {
1023       int start = this.rand.nextInt(Integer.MAX_VALUE) % totalRows;
1024       int stop = start + maxRange;
1025       return new Pair<>(format(start), format(stop));
1026     }
1027 
1028     @Override
1029     protected int getReportingPeriod() {
1030       int period = this.perClientRunRows / 100;
1031       return period == 0? this.perClientRunRows: period;
1032     }
1033   }
1034 
1035   static class RandomScanWithRange10Test extends RandomScanWithRangeTest {
1036     RandomScanWithRange10Test(Configuration conf, TestOptions options, Status status) {
1037       super(conf, options, status);
1038     }
1039 
1040     @Override
1041     protected Pair<byte[], byte[]> getStartAndStopRow() {
1042       return generateStartAndStopRows(10);
1043     }
1044   }
1045 
1046   static class RandomScanWithRange100Test extends RandomScanWithRangeTest {
1047     RandomScanWithRange100Test(Configuration conf, TestOptions options, Status status) {
1048       super(conf, options, status);
1049     }
1050 
1051     @Override
1052     protected Pair<byte[], byte[]> getStartAndStopRow() {
1053       return generateStartAndStopRows(100);
1054     }
1055   }
1056 
1057   static class RandomScanWithRange1000Test extends RandomScanWithRangeTest {
1058     RandomScanWithRange1000Test(Configuration conf, TestOptions options, Status status) {
1059       super(conf, options, status);
1060     }
1061 
1062     @Override
1063     protected Pair<byte[], byte[]> getStartAndStopRow() {
1064       return generateStartAndStopRows(1000);
1065     }
1066   }
1067 
1068   static class RandomScanWithRange10000Test extends RandomScanWithRangeTest {
1069     RandomScanWithRange10000Test(Configuration conf, TestOptions options, Status status) {
1070       super(conf, options, status);
1071     }
1072 
1073     @Override
1074     protected Pair<byte[], byte[]> getStartAndStopRow() {
1075       return generateStartAndStopRows(10000);
1076     }
1077   }
1078 
1079   static class RandomReadTest extends TableTest {
1080     RandomReadTest(Configuration conf, TestOptions options, Status status) {
1081       super(conf, options, status);
1082     }
1083 
1084     @Override
1085     void testRow(final int i) throws IOException {
1086       Get get = new Get(getRandomRow(this.rand, this.totalRows));
1087       get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
1088       this.table.get(get);
1089     }
1090 
1091     @Override
1092     protected int getReportingPeriod() {
1093       int period = this.perClientRunRows / 100;
1094       return period == 0? this.perClientRunRows: period;
1095     }
1096   }
1097 
1098   static class RandomWriteTest extends BufferedMutatorTest {
1099     RandomWriteTest(Configuration conf, TestOptions options, Status status) {
1100       super(conf, options, status);
1101     }
1102 
1103     @Override
1104     void testRow(final int i) throws IOException {
1105       byte[] row = getRandomRow(this.rand, this.totalRows);
1106       Put put = new Put(row);
1107       byte[] value = generateData(this.rand, ROW_LENGTH);
1108       if (useTags) {
1109         byte[] tag = generateData(this.rand, TAG_LENGTH);
1110         Tag[] tags = new Tag[noOfTags];
1111         for (int n = 0; n < noOfTags; n++) {
1112           Tag t = new Tag((byte) n, tag);
1113           tags[n] = t;
1114         }
1115         KeyValue kv = new KeyValue(row, FAMILY_NAME, QUALIFIER_NAME, HConstants.LATEST_TIMESTAMP,
1116             value, tags);
1117         put.add(kv);
1118       } else {
1119         put.add(FAMILY_NAME, QUALIFIER_NAME, value);
1120       }
1121       put.setDurability(writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL);
1122       mutator.mutate(put);
1123     }
1124   }
1125 
1126   static class ScanTest extends TableTest {
1127     private ResultScanner testScanner;
1128 
1129     ScanTest(Configuration conf, TestOptions options, Status status) {
1130       super(conf, options, status);
1131     }
1132 
1133     @Override
1134     void testTakedown() throws IOException {
1135       if (this.testScanner != null) {
1136         this.testScanner.close();
1137       }
1138       super.testTakedown();
1139     }
1140 
1141     @Override
1142     void testRow(final int i) throws IOException {
1143       if (this.testScanner == null) {
1144         Scan scan = new Scan(format(this.startRow));
1145         scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
1146         this.testScanner = table.getScanner(scan);
1147       }
1148       testScanner.next();
1149     }
1150   }
1151 
1152   static class SequentialReadTest extends TableTest {
1153     SequentialReadTest(Configuration conf, TestOptions options, Status status) {
1154       super(conf, options, status);
1155     }
1156 
1157     @Override
1158     void testRow(final int i) throws IOException {
1159       Get get = new Get(format(i));
1160       get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
1161       table.get(get);
1162     }
1163   }
1164 
1165   static class SequentialWriteTest extends BufferedMutatorTest {
1166     SequentialWriteTest(Configuration conf, TestOptions options, Status status) {
1167       super(conf, options, status);
1168     }
1169 
1170     @Override
1171     void testRow(final int i) throws IOException {
1172       byte[] row = format(i);
1173       Put put = new Put(row);
1174       byte[] value = generateData(this.rand, ROW_LENGTH);
1175       if (useTags) {
1176         byte[] tag = generateData(this.rand, TAG_LENGTH);
1177         Tag[] tags = new Tag[noOfTags];
1178         for (int n = 0; n < noOfTags; n++) {
1179           Tag t = new Tag((byte) n, tag);
1180           tags[n] = t;
1181         }
1182         KeyValue kv = new KeyValue(row, FAMILY_NAME, QUALIFIER_NAME, HConstants.LATEST_TIMESTAMP,
1183             value, tags);
1184         put.add(kv);
1185       } else {
1186         put.add(FAMILY_NAME, QUALIFIER_NAME, value);
1187       }
1188       put.setDurability(writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL);
1189       mutator.mutate(put);
1190     }
1191   }
1192 
1193   static class FilteredScanTest extends TableTest {
1194     protected static final Log LOG = LogFactory.getLog(FilteredScanTest.class.getName());
1195 
1196     FilteredScanTest(Configuration conf, TestOptions options, Status status) {
1197       super(conf, options, status);
1198     }
1199 
1200     @Override
1201     void testRow(int i) throws IOException {
1202       byte[] value = generateValue(this.rand);
1203       Scan scan = constructScan(value);
1204       try (ResultScanner scanner = this.table.getScanner(scan)) {
1205         while (scanner.next() != null) {
1206         }
1207       }
1208     }
1209 
1210     protected Scan constructScan(byte[] valuePrefix) {
1211       Filter filter = new SingleColumnValueFilter(
1212           FAMILY_NAME, QUALIFIER_NAME, CompareFilter.CompareOp.EQUAL,
1213           new BinaryComparator(valuePrefix)
1214       );
1215       Scan scan = new Scan();
1216       scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
1217       scan.setFilter(filter);
1218       return scan;
1219     }
1220   }
1221 
  /**
   * Format passed integer as a zero-padded decimal row key.
   * @param number the integer to format
   * @return Returns zero-prefixed {@code DEFAULT_ROW_PREFIX_LENGTH + 10}-byte wide decimal
   *    version of passed number (Does absolute in case number is negative).
   */
  public static byte [] format(final int number) {
    byte[] b = new byte[DEFAULT_ROW_PREFIX_LENGTH + 10];
    // NOTE(review): Math.abs(Integer.MIN_VALUE) is still negative, which would
    // produce non-digit bytes here — confirm callers never pass MIN_VALUE.
    int d = Math.abs(number);
    // Fill digits from the least-significant end; unused leading positions
    // come out as '0' because d collapses to 0.
    for (int i = b.length - 1; i >= 0; i--) {
      b[i] = (byte)((d % 10) + '0');
      d /= 10;
    }
    return b;
  }
1237 
1238   public static byte[] generateData(final Random r, int length) {
1239     byte[] b = new byte [length];
1240     int i;
1241 
1242     for (i = 0; i < (length-8); i += 8) {
1243       b[i] = (byte) (65 + r.nextInt(26));
1244       b[i+1] = b[i];
1245       b[i+2] = b[i];
1246       b[i+3] = b[i];
1247       b[i+4] = b[i];
1248       b[i+5] = b[i];
1249       b[i+6] = b[i];
1250       b[i+7] = b[i];
1251     }
1252 
1253     byte a = (byte) (65 + r.nextInt(26));
1254     for (; i < length; i++) {
1255       b[i] = a;
1256     }
1257     return b;
1258   }
1259 
1260   public static byte[] generateValue(final Random r) {
1261     byte [] b = new byte [ROW_LENGTH];
1262     r.nextBytes(b);
1263     return b;
1264   }
1265 
  // Picks a pseudo-random row index in [0, totalRows) and formats it as a row
  // key. NOTE(review): nextInt(Integer.MAX_VALUE) % totalRows carries a slight
  // modulo bias and requires totalRows > 0 (ArithmeticException otherwise).
  static byte[] getRandomRow(final Random random, final int totalRows) {
    return format(random.nextInt(Integer.MAX_VALUE) % totalRows);
  }
1269 
1270   long runOneClient(final Class<? extends Test> cmd, final int startRow,
1271       final int perClientRunRows, final int totalRows,
1272       boolean flushCommits, boolean writeToWAL, boolean useTags, int noOfTags,
1273       Connection connection, final Status status) throws IOException {
1274     status.setStatus("Start " + cmd + " at offset " + startRow + " for " +
1275       perClientRunRows + " rows");
1276     long totalElapsedTime;
1277 
1278     TestOptions options = new TestOptions(startRow, perClientRunRows,
1279       totalRows, tableName, flushCommits, writeToWAL, useTags, noOfTags, connection);
1280     final Test t;
1281     try {
1282       Constructor<? extends Test> constructor = cmd.getDeclaredConstructor(
1283           Configuration.class, TestOptions.class, Status.class);
1284       t = constructor.newInstance(this.conf, options, status);
1285     } catch (NoSuchMethodException e) {
1286       throw new IllegalArgumentException("Invalid command class: " +
1287           cmd.getName() + ".  It does not provide a constructor as described by" +
1288           "the javadoc comment.  Available constructors are: " +
1289           Arrays.toString(cmd.getConstructors()));
1290     } catch (Exception e) {
1291       throw new IllegalStateException("Failed to construct command class", e);
1292     }
1293     totalElapsedTime = t.test();
1294 
1295     status.setStatus("Finished " + cmd + " in " + totalElapsedTime +
1296       "ms at offset " + startRow + " for " + perClientRunRows + " rows");
1297     return totalElapsedTime;
1298   }
1299 
  /*
   * Runs the given test with a single client in this JVM against the REST
   * gateway addressed by {@code cluster}.
   */
  private void runNIsOne(final Class<? extends Test> cmd) {
    // Status sink that just logs progress messages.
    Status status = new Status() {
      @Override
      public void setStatus(String msg) {
        LOG.info(msg);
      }
    };

    RemoteAdmin admin;
    try {
      // NOTE(review): this Client is never shut down; confirm whether the REST
      // Client holds resources that should be released here.
      Client client = new Client(cluster);
      admin = new RemoteAdmin(client, getConf());
      // Ensure the target table exists before driving load at it.
      checkTable(admin);
      runOneClient(cmd, 0, this.R, this.R, this.flushCommits, this.writeToWAL,
        this.useTags, this.noOfTags, this.connection, status);
    } catch (Exception e) {
      // Best effort: failures are logged rather than rethrown so the tool
      // can return an error code instead of crashing.
      LOG.error("Failed", e);
    }
  }
1319 
1320   private void runTest(final Class<? extends Test> cmd)
1321       throws IOException, InterruptedException, ClassNotFoundException {
1322     if (N == 1) {
1323       // If there is only one client and one HRegionServer, we assume nothing
1324       // has been set up at all.
1325       runNIsOne(cmd);
1326     } else {
1327       // Else, run
1328       runNIsMoreThanOne(cmd);
1329     }
1330   }
1331 
  /** Prints command-line usage with no leading error message. */
  protected void printUsage() {
    printUsage(null);
  }
1335 
  /**
   * Prints command-line usage to stderr.
   * @param message optional error message printed first; ignored when null/empty
   */
  protected void printUsage(final String message) {
    if (message != null && message.length() > 0) {
      System.err.println(message);
    }
    System.err.println("Usage: java " + this.getClass().getName() + " \\");
    System.err.println("  [--nomapred] [--rows=ROWS] [--table=NAME] \\");
    System.err.println("  [--compress=TYPE] [--blockEncoding=TYPE] " +
      "[-D<property=value>]* <command> <nclients>");
    System.err.println();
    System.err.println("Options:");
    System.err.println(" nomapred        Run multiple clients using threads " +
      "(rather than use mapreduce)");
    System.err.println(" rows            Rows each client runs. Default: One million");
    System.err.println(" table           Alternate table name. Default: 'TestTable'");
    System.err.println(" compress        Compression type to use (GZ, LZO, ...). Default: 'NONE'");
    System.err.println(" flushCommits    Used to determine if the test should flush the table. " +
      "Default: false");
    System.err.println(" writeToWAL      Set writeToWAL on puts. Default: True");
    System.err.println(" presplit        Create presplit table. Recommended for accurate perf " +
      "analysis (see guide).  Default: disabled");
    System.err.println(" inmemory        Tries to keep the HFiles of the CF inmemory as far as " +
      "possible.  Not guaranteed that reads are always served from inmemory.  Default: false");
    System.err.println(" usetags         Writes tags along with KVs.  Use with HFile V3. " +
      "Default : false");
    System.err.println(" numoftags        Specify the no of tags that would be needed. " +
      "This works only if usetags is true.");
    System.err.println();
    System.err.println(" Note: -D properties will be applied to the conf used. ");
    System.err.println("  For example: ");
    System.err.println("   -Dmapreduce.output.fileoutputformat.compress=true");
    System.err.println("   -Dmapreduce.task.timeout=60000");
    System.err.println();
    System.err.println("Command:");
    // One line per registered command, pulled from the commands registry.
    for (CmdDescriptor command : commands.values()) {
      System.err.println(String.format(" %-15s %s", command.getName(), command.getDescription()));
    }
    System.err.println();
    System.err.println("Args:");
    System.err.println(" nclients      Integer. Required. Total number of " +
      "clients (and HRegionServers)");
    System.err.println("               running: 1 <= value <= 500");
    System.err.println("Examples:");
    System.err.println(" To run a single evaluation client:");
    System.err.println(" $ bin/hbase " + this.getClass().getName()
        + " sequentialWrite 1");
  }
1382 
1383   private void getArgs(final int start, final String[] args) {
1384     if (start + 1 > args.length) {
1385       throw new IllegalArgumentException("must supply the number of clients");
1386     }
1387     N = Integer.parseInt(args[start]);
1388     if (N < 1) {
1389       throw new IllegalArgumentException("Number of clients must be > 1");
1390     }
1391     // Set total number of rows to write.
1392     R = R * N;
1393   }
1394 
1395   @Override
1396   public int run(String[] args) throws Exception {
1397     // Process command-line args. TODO: Better cmd-line processing
1398     // (but hopefully something not as painful as cli options).
1399     int errCode = -1;
1400     if (args.length < 1) {
1401       printUsage();
1402       return errCode;
1403     }
1404 
1405     try {
1406       for (int i = 0; i < args.length; i++) {
1407         String cmd = args[i];
1408         if (cmd.equals("-h") || cmd.startsWith("--h")) {
1409           printUsage();
1410           errCode = 0;
1411           break;
1412         }
1413 
1414         final String nmr = "--nomapred";
1415         if (cmd.startsWith(nmr)) {
1416           nomapred = true;
1417           continue;
1418         }
1419 
1420         final String rows = "--rows=";
1421         if (cmd.startsWith(rows)) {
1422           R = Integer.parseInt(cmd.substring(rows.length()));
1423           continue;
1424         }
1425 
1426         final String table = "--table=";
1427         if (cmd.startsWith(table)) {
1428           this.tableName = TableName.valueOf(cmd.substring(table.length()));
1429           continue;
1430         }
1431 
1432         final String compress = "--compress=";
1433         if (cmd.startsWith(compress)) {
1434           this.compression = Compression.Algorithm.valueOf(cmd.substring(compress.length()));
1435           continue;
1436         }
1437 
1438         final String blockEncoding = "--blockEncoding=";
1439         if (cmd.startsWith(blockEncoding)) {
1440           this.blockEncoding = DataBlockEncoding.valueOf(cmd.substring(blockEncoding.length()));
1441           continue;
1442         }
1443 
1444         final String flushCommits = "--flushCommits=";
1445         if (cmd.startsWith(flushCommits)) {
1446           this.flushCommits = Boolean.parseBoolean(cmd.substring(flushCommits.length()));
1447           continue;
1448         }
1449 
1450         final String writeToWAL = "--writeToWAL=";
1451         if (cmd.startsWith(writeToWAL)) {
1452           this.writeToWAL = Boolean.parseBoolean(cmd.substring(writeToWAL.length()));
1453           continue;
1454         }
1455 
1456         final String presplit = "--presplit=";
1457         if (cmd.startsWith(presplit)) {
1458           this.presplitRegions = Integer.parseInt(cmd.substring(presplit.length()));
1459           continue;
1460         }
1461 
1462         final String inMemory = "--inmemory=";
1463         if (cmd.startsWith(inMemory)) {
1464           this.inMemoryCF = Boolean.parseBoolean(cmd.substring(inMemory.length()));
1465           continue;
1466         }
1467 
1468         this.connection = ConnectionFactory.createConnection(getConf());
1469 
1470         final String useTags = "--usetags=";
1471         if (cmd.startsWith(useTags)) {
1472           this.useTags = Boolean.parseBoolean(cmd.substring(useTags.length()));
1473           continue;
1474         }
1475 
1476         final String noOfTags = "--nooftags=";
1477         if (cmd.startsWith(noOfTags)) {
1478           this.noOfTags = Integer.parseInt(cmd.substring(noOfTags.length()));
1479           continue;
1480         }
1481 
1482         final String host = "--host=";
1483         if (cmd.startsWith(host)) {
1484           cluster.add(cmd.substring(host.length()));
1485           continue;
1486         }
1487 
1488         Class<? extends Test> cmdClass = determineCommandClass(cmd);
1489         if (cmdClass != null) {
1490           getArgs(i + 1, args);
1491           if (cluster.isEmpty()) {
1492             String s = conf.get("stargate.hostname", "localhost");
1493             if (s.contains(":")) {
1494               cluster.add(s);
1495             } else {
1496               cluster.add(s, conf.getInt("stargate.port", 8080));
1497             }
1498           }
1499           runTest(cmdClass);
1500           errCode = 0;
1501           break;
1502         }
1503 
1504         printUsage();
1505         break;
1506       }
1507     } catch (Exception e) {
1508       LOG.error("Failed", e);
1509     }
1510 
1511     return errCode;
1512   }
1513 
1514   private Class<? extends Test> determineCommandClass(String cmd) {
1515     CmdDescriptor descriptor = commands.get(cmd);
1516     return descriptor != null ? descriptor.getCmdClass() : null;
1517   }
1518 
1519   public static void main(final String[] args) throws Exception {
1520     int res = ToolRunner.run(new PerformanceEvaluation(HBaseConfiguration.create()), args);
1521     System.exit(res);
1522   }
1523 }