001/*
002 * Licensed to the Apache Software Foundation (ASF) under one
003 * or more contributor license agreements.  See the NOTICE file
004 * distributed with this work for additional information
005 * regarding copyright ownership.  The ASF licenses this file
006 * to you under the Apache License, Version 2.0 (the
007 * "License"); you may not use this file except in compliance
008 * with the License.  You may obtain a copy of the License at
009 *
010 *     http://www.apache.org/licenses/LICENSE-2.0
011 *
012 * Unless required by applicable law or agreed to in writing, software
013 * distributed under the License is distributed on an "AS IS" BASIS,
014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
015 * See the License for the specific language governing permissions and
016 * limitations under the License.
017 */
018package org.apache.hadoop.hbase.mapreduce;
019
020import static java.lang.String.format;
021
022import java.io.File;
023import java.io.IOException;
024import java.util.ArrayList;
025import java.util.Base64;
026import java.util.HashSet;
027import java.util.Set;
028import org.apache.commons.lang3.StringUtils;
029import org.apache.hadoop.conf.Configuration;
030import org.apache.hadoop.conf.Configured;
031import org.apache.hadoop.fs.Path;
032import org.apache.hadoop.hbase.HBaseConfiguration;
033import org.apache.hadoop.hbase.HColumnDescriptor;
034import org.apache.hadoop.hbase.HConstants;
035import org.apache.hadoop.hbase.HTableDescriptor;
036import org.apache.hadoop.hbase.TableName;
037import org.apache.hadoop.hbase.TableNotEnabledException;
038import org.apache.hadoop.hbase.TableNotFoundException;
039import org.apache.hadoop.hbase.client.Admin;
040import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
041import org.apache.hadoop.hbase.client.Connection;
042import org.apache.hadoop.hbase.client.ConnectionFactory;
043import org.apache.hadoop.hbase.client.Put;
044import org.apache.hadoop.hbase.client.RegionLocator;
045import org.apache.hadoop.hbase.client.Table;
046import org.apache.hadoop.hbase.client.TableDescriptor;
047import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
048import org.apache.hadoop.hbase.util.Bytes;
049import org.apache.hadoop.hbase.util.Pair;
050import org.apache.hadoop.io.Text;
051import org.apache.hadoop.mapreduce.Job;
052import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
053import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
054import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
055import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
056import org.apache.hadoop.security.Credentials;
057import org.apache.hadoop.util.Tool;
058import org.apache.hadoop.util.ToolRunner;
059import org.apache.yetus.audience.InterfaceAudience;
060import org.slf4j.Logger;
061import org.slf4j.LoggerFactory;
062
063import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
064import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
065import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
066
067/**
068 * Tool to import data from a TSV file. This tool is rather simplistic - it doesn't do any quoting
069 * or escaping, but is useful for many data loads.
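 * <p>
 * One common way to run the tool (illustrative only; the table name, column specification and
 * paths below are placeholders):
 *
 * <pre>
 * hbase org.apache.hadoop.hbase.mapreduce.ImportTsv \
 *   -Dimporttsv.columns=HBASE_ROW_KEY,d:c1,d:c2 \
 *   -Dimporttsv.bulk.output=/tmp/storefiles \
 *   mytable hdfs:///path/to/input
 * </pre>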
070 * @see ImportTsv#usage(String)
071 */
072@InterfaceAudience.Public
073public class ImportTsv extends Configured implements Tool {
074
075  protected static final Logger LOG = LoggerFactory.getLogger(ImportTsv.class);
076
077  final static String NAME = "importtsv";
078
079  public final static String MAPPER_CONF_KEY = "importtsv.mapper.class";
080  public final static String BULK_OUTPUT_CONF_KEY = "importtsv.bulk.output";
081  public final static String TIMESTAMP_CONF_KEY = "importtsv.timestamp";
082  public final static String JOB_NAME_CONF_KEY = "mapreduce.job.name";
083  // TODO: the rest of these configs are used exclusively by TsvImporterMapper.
084  // Move them out of the tool and let the mapper handle its own validation.
085  public final static String DRY_RUN_CONF_KEY = "importtsv.dry.run";
086  // If true, bad lines are logged to stderr. Default: false.
087  public final static String LOG_BAD_LINES_CONF_KEY = "importtsv.log.bad.lines";
088  public final static String SKIP_LINES_CONF_KEY = "importtsv.skip.bad.lines";
089  public final static String SKIP_EMPTY_COLUMNS = "importtsv.skip.empty.columns";
090  public final static String COLUMNS_CONF_KEY = "importtsv.columns";
091  public final static String SEPARATOR_CONF_KEY = "importtsv.separator";
092  public final static String ATTRIBUTE_SEPERATOR_CONF_KEY = "attributes.seperator";
  // This config is used to propagate credentials from parent MR jobs which launch
  // ImportTSV jobs. See IntegrationTestImportTsv.
095  public final static String CREDENTIALS_LOCATION = "credentials_location";
096  final static String DEFAULT_SEPARATOR = "\t";
097  final static String DEFAULT_ATTRIBUTES_SEPERATOR = "=>";
098  final static String DEFAULT_MULTIPLE_ATTRIBUTES_SEPERATOR = ",";
099  final static Class DEFAULT_MAPPER = TsvImporterMapper.class;
100  public final static String CREATE_TABLE_CONF_KEY = "create.table";
101  public final static String NO_STRICT_COL_FAMILY = "no.strict";
102  /**
   * If the table did not exist and was created in dry-run mode, this flag is flipped so that the
   * table is deleted when the MR job ends.
105   */
106  private static boolean DRY_RUN_TABLE_CREATED;
107
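  /**
   * Splits a single line of separator-delimited bytes into columns according to the configured
   * column specification. {@link #parse} returns a {@code ParsedLine} view over the original line
   * bytes without copying them. For example (illustrative; {@code lineBytes} is assumed to hold
   * one line of input):
   *
   * <pre>
   * TsvParser parser = new TsvParser("HBASE_ROW_KEY,d:c1", "\t");
   * TsvParser.ParsedLine parsed = parser.parse(lineBytes, lineBytes.length);
   * </pre>
   */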
108  public static class TsvParser {
109    /**
110     * Column families and qualifiers mapped to the TSV columns
111     */
112    private final byte[][] families;
113    private final byte[][] qualifiers;
114
115    private final byte separatorByte;
116
117    private int rowKeyColumnIndex;
118
119    private int maxColumnCount;
120
121    // Default value must be negative
122    public static final int DEFAULT_TIMESTAMP_COLUMN_INDEX = -1;
123
124    private int timestampKeyColumnIndex = DEFAULT_TIMESTAMP_COLUMN_INDEX;
125
126    public static final String ROWKEY_COLUMN_SPEC = "HBASE_ROW_KEY";
127
128    public static final String TIMESTAMPKEY_COLUMN_SPEC = "HBASE_TS_KEY";
129
130    public static final String ATTRIBUTES_COLUMN_SPEC = "HBASE_ATTRIBUTES_KEY";
131
132    public static final String CELL_VISIBILITY_COLUMN_SPEC = "HBASE_CELL_VISIBILITY";
133
134    public static final String CELL_TTL_COLUMN_SPEC = "HBASE_CELL_TTL";
135
136    private int attrKeyColumnIndex = DEFAULT_ATTRIBUTES_COLUMN_INDEX;
137
138    public static final int DEFAULT_ATTRIBUTES_COLUMN_INDEX = -1;
139
140    public static final int DEFAULT_CELL_VISIBILITY_COLUMN_INDEX = -1;
141
142    public static final int DEFAULT_CELL_TTL_COLUMN_INDEX = -1;
143
144    private int cellVisibilityColumnIndex = DEFAULT_CELL_VISIBILITY_COLUMN_INDEX;
145
146    private int cellTTLColumnIndex = DEFAULT_CELL_TTL_COLUMN_INDEX;
147
148    /**
     * @param columnsSpecification the list of columns to parse out, comma separated. The row key
     *                             should be the special token TsvParser.ROWKEY_COLUMN_SPEC
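     *                             (for example, {@code HBASE_ROW_KEY,d:c1,d:c2}; the family and
     *                             qualifier names are illustrative)
     * @param separatorStr         the field separator; must encode to a single byte, e.g. "\t"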
151     */
152    public TsvParser(String columnsSpecification, String separatorStr) {
153      // Configure separator
154      byte[] separator = Bytes.toBytes(separatorStr);
155      Preconditions.checkArgument(separator.length == 1,
156        "TsvParser only supports single-byte separators");
157      separatorByte = separator[0];
158
159      // Configure columns
160      ArrayList<String> columnStrings =
161        Lists.newArrayList(Splitter.on(',').trimResults().split(columnsSpecification));
162
163      maxColumnCount = columnStrings.size();
164      families = new byte[maxColumnCount][];
165      qualifiers = new byte[maxColumnCount][];
166
167      for (int i = 0; i < columnStrings.size(); i++) {
168        String str = columnStrings.get(i);
169        if (ROWKEY_COLUMN_SPEC.equals(str)) {
170          rowKeyColumnIndex = i;
171          continue;
172        }
173        if (TIMESTAMPKEY_COLUMN_SPEC.equals(str)) {
174          timestampKeyColumnIndex = i;
175          continue;
176        }
177        if (ATTRIBUTES_COLUMN_SPEC.equals(str)) {
178          attrKeyColumnIndex = i;
179          continue;
180        }
181        if (CELL_VISIBILITY_COLUMN_SPEC.equals(str)) {
182          cellVisibilityColumnIndex = i;
183          continue;
184        }
185        if (CELL_TTL_COLUMN_SPEC.equals(str)) {
186          cellTTLColumnIndex = i;
187          continue;
188        }
189        String[] parts = str.split(":", 2);
190        if (parts.length == 1) {
191          families[i] = str.getBytes();
192          qualifiers[i] = HConstants.EMPTY_BYTE_ARRAY;
193        } else {
194          families[i] = parts[0].getBytes();
195          qualifiers[i] = parts[1].getBytes();
196        }
197      }
198    }
199
200    public boolean hasTimestamp() {
201      return timestampKeyColumnIndex != DEFAULT_TIMESTAMP_COLUMN_INDEX;
202    }
203
204    public int getTimestampKeyColumnIndex() {
205      return timestampKeyColumnIndex;
206    }
207
208    public boolean hasAttributes() {
209      return attrKeyColumnIndex != DEFAULT_ATTRIBUTES_COLUMN_INDEX;
210    }
211
212    public boolean hasCellVisibility() {
213      return cellVisibilityColumnIndex != DEFAULT_CELL_VISIBILITY_COLUMN_INDEX;
214    }
215
216    public boolean hasCellTTL() {
      return cellTTLColumnIndex != DEFAULT_CELL_TTL_COLUMN_INDEX;
218    }
219
220    public int getAttributesKeyColumnIndex() {
221      return attrKeyColumnIndex;
222    }
223
224    public int getCellVisibilityColumnIndex() {
225      return cellVisibilityColumnIndex;
226    }
227
228    public int getCellTTLColumnIndex() {
229      return cellTTLColumnIndex;
230    }
231
232    public int getRowKeyColumnIndex() {
233      return rowKeyColumnIndex;
234    }
235
236    public byte[] getFamily(int idx) {
237      return families[idx];
238    }
239
240    public byte[] getQualifier(int idx) {
241      return qualifiers[idx];
242    }
243
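    /**
     * Locates every separator byte in {@code lineBytes[0..length)} and validates that the line has
     * enough columns for the configured row key, timestamp, attributes, visibility and TTL
     * positions.
     * @return a {@link ParsedLine} backed by {@code lineBytes}; no data is copied
     * @throws BadTsvLineException if the line has no separator, has too many columns, or is
     *                             missing a required column
     */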
244    public ParsedLine parse(byte[] lineBytes, int length) throws BadTsvLineException {
245      // Enumerate separator offsets
246      ArrayList<Integer> tabOffsets = new ArrayList<>(maxColumnCount);
247      for (int i = 0; i < length; i++) {
248        if (lineBytes[i] == separatorByte) {
249          tabOffsets.add(i);
250        }
251      }
252      if (tabOffsets.isEmpty()) {
253        throw new BadTsvLineException("No delimiter");
254      }
255
256      tabOffsets.add(length);
257
258      if (tabOffsets.size() > maxColumnCount) {
259        throw new BadTsvLineException("Excessive columns");
260      } else if (tabOffsets.size() <= getRowKeyColumnIndex()) {
261        throw new BadTsvLineException("No row key");
262      } else if (hasTimestamp() && tabOffsets.size() <= getTimestampKeyColumnIndex()) {
263        throw new BadTsvLineException("No timestamp");
264      } else if (hasAttributes() && tabOffsets.size() <= getAttributesKeyColumnIndex()) {
265        throw new BadTsvLineException("No attributes specified");
266      } else if (hasCellVisibility() && tabOffsets.size() <= getCellVisibilityColumnIndex()) {
267        throw new BadTsvLineException("No cell visibility specified");
268      } else if (hasCellTTL() && tabOffsets.size() <= getCellTTLColumnIndex()) {
269        throw new BadTsvLineException("No cell TTL specified");
270      }
271      return new ParsedLine(tabOffsets, lineBytes);
272    }
273
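    /**
     * Lightweight view over one parsed line: the original line bytes plus the offset of each
     * separator, from which per-column offsets and lengths are derived on demand.
     */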
274    class ParsedLine {
275      private final ArrayList<Integer> tabOffsets;
276      private byte[] lineBytes;
277
278      ParsedLine(ArrayList<Integer> tabOffsets, byte[] lineBytes) {
279        this.tabOffsets = tabOffsets;
280        this.lineBytes = lineBytes;
281      }
282
283      public int getRowKeyOffset() {
284        return getColumnOffset(rowKeyColumnIndex);
285      }
286
287      public int getRowKeyLength() {
288        return getColumnLength(rowKeyColumnIndex);
289      }
290
291      public long getTimestamp(long ts) throws BadTsvLineException {
292        // Return ts if HBASE_TS_KEY is not configured in column spec
293        if (!hasTimestamp()) {
294          return ts;
295        }
296
297        String timeStampStr = Bytes.toString(lineBytes, getColumnOffset(timestampKeyColumnIndex),
298          getColumnLength(timestampKeyColumnIndex));
299        try {
300          return Long.parseLong(timeStampStr);
301        } catch (NumberFormatException nfe) {
          // Treat this record as a bad record.
303          throw new BadTsvLineException("Invalid timestamp " + timeStampStr);
304        }
305      }
306
307      private String getAttributes() {
308        if (!hasAttributes()) {
309          return null;
310        } else {
311          return Bytes.toString(lineBytes, getColumnOffset(attrKeyColumnIndex),
312            getColumnLength(attrKeyColumnIndex));
313        }
314      }
315
316      public String[] getIndividualAttributes() {
317        String attributes = getAttributes();
318        if (attributes != null) {
319          return attributes.split(DEFAULT_MULTIPLE_ATTRIBUTES_SEPERATOR);
320        } else {
321          return null;
322        }
323      }
324
325      public int getAttributeKeyOffset() {
326        if (hasAttributes()) {
327          return getColumnOffset(attrKeyColumnIndex);
328        } else {
329          return DEFAULT_ATTRIBUTES_COLUMN_INDEX;
330        }
331      }
332
333      public int getAttributeKeyLength() {
334        if (hasAttributes()) {
335          return getColumnLength(attrKeyColumnIndex);
336        } else {
337          return DEFAULT_ATTRIBUTES_COLUMN_INDEX;
338        }
339      }
340
341      public int getCellVisibilityColumnOffset() {
342        if (hasCellVisibility()) {
343          return getColumnOffset(cellVisibilityColumnIndex);
344        } else {
345          return DEFAULT_CELL_VISIBILITY_COLUMN_INDEX;
346        }
347      }
348
349      public int getCellVisibilityColumnLength() {
350        if (hasCellVisibility()) {
351          return getColumnLength(cellVisibilityColumnIndex);
352        } else {
353          return DEFAULT_CELL_VISIBILITY_COLUMN_INDEX;
354        }
355      }
356
357      public String getCellVisibility() {
358        if (!hasCellVisibility()) {
359          return null;
360        } else {
361          return Bytes.toString(lineBytes, getColumnOffset(cellVisibilityColumnIndex),
362            getColumnLength(cellVisibilityColumnIndex));
363        }
364      }
365
366      public int getCellTTLColumnOffset() {
367        if (hasCellTTL()) {
368          return getColumnOffset(cellTTLColumnIndex);
369        } else {
370          return DEFAULT_CELL_TTL_COLUMN_INDEX;
371        }
372      }
373
374      public int getCellTTLColumnLength() {
375        if (hasCellTTL()) {
376          return getColumnLength(cellTTLColumnIndex);
377        } else {
378          return DEFAULT_CELL_TTL_COLUMN_INDEX;
379        }
380      }
381
382      public long getCellTTL() {
383        if (!hasCellTTL()) {
384          return 0;
385        } else {
386          return Bytes.toLong(lineBytes, getColumnOffset(cellTTLColumnIndex),
387            getColumnLength(cellTTLColumnIndex));
388        }
389      }
390
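      /**
       * Offset of column {@code idx} within the line bytes: one past the preceding separator, or 0
       * for the first column.
       */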
391      public int getColumnOffset(int idx) {
392        if (idx > 0) return tabOffsets.get(idx - 1) + 1;
393        else return 0;
394      }
395
396      public int getColumnLength(int idx) {
397        return tabOffsets.get(idx) - getColumnOffset(idx);
398      }
399
400      public int getColumnCount() {
401        return tabOffsets.size();
402      }
403
404      public byte[] getLineBytes() {
405        return lineBytes;
406      }
407    }
408
409    public static class BadTsvLineException extends Exception {
410      public BadTsvLineException(String err) {
411        super(err);
412      }
413
414      private static final long serialVersionUID = 1L;
415    }
416
417    /**
     * Return the starting position and length of the row key from the specified line bytes.
     * @param lineBytes the line to scan
     * @param length    the number of valid bytes in {@code lineBytes}
     * @return Pair of row key offset and length.
420     */
421    public Pair<Integer, Integer> parseRowKey(byte[] lineBytes, int length)
422      throws BadTsvLineException {
423      int rkColumnIndex = 0;
424      int startPos = 0, endPos = 0;
425      for (int i = 0; i <= length; i++) {
426        if (i == length || lineBytes[i] == separatorByte) {
427          endPos = i - 1;
428          if (rkColumnIndex++ == getRowKeyColumnIndex()) {
429            if ((endPos + 1) == startPos) {
430              throw new BadTsvLineException("Empty value for ROW KEY.");
431            }
432            break;
433          } else {
434            startPos = endPos + 2;
435          }
436        }
437        if (i == length) {
          throw new BadTsvLineException("Row key does not exist as the number of columns in the"
            + " line is less than the row key position.");
440        }
441      }
442      return new Pair<>(startPos, endPos - startPos + 1);
443    }
444  }
445
446  /**
447   * Sets up the actual job.
448   * @param conf The current configuration.
449   * @param args The command line parameters.
450   * @return The newly created job.
   * @throws IOException When setting up the job fails.
   * @throws ClassNotFoundException When the configured mapper class cannot be loaded.
452   */
453  protected static Job createSubmittableJob(Configuration conf, String[] args)
454    throws IOException, ClassNotFoundException {
455    Job job = null;
456    boolean isDryRun = conf.getBoolean(DRY_RUN_CONF_KEY, false);
457    try (Connection connection = ConnectionFactory.createConnection(conf)) {
458      try (Admin admin = connection.getAdmin()) {
        // Re-encode the passed separator as a Base64 string so that characters which cannot be
        // represented in XML configuration survive; consumers such as TsvImporterMapper decode it.
461        String actualSeparator = conf.get(SEPARATOR_CONF_KEY);
462        if (actualSeparator != null) {
463          conf.set(SEPARATOR_CONF_KEY,
464            Bytes.toString(Base64.getEncoder().encode(actualSeparator.getBytes())));
465        }
466
467        // See if a non-default Mapper was set
468        String mapperClassName = conf.get(MAPPER_CONF_KEY);
469        Class mapperClass =
470          mapperClassName != null ? Class.forName(mapperClassName) : DEFAULT_MAPPER;
471
472        TableName tableName = TableName.valueOf(args[0]);
473        Path inputDir = new Path(args[1]);
474        String jobName = conf.get(JOB_NAME_CONF_KEY, NAME + "_" + tableName.getNameAsString());
475        job = Job.getInstance(conf, jobName);
476        job.setJarByClass(mapperClass);
477        FileInputFormat.setInputPaths(job, inputDir);
478        job.setInputFormatClass(TextInputFormat.class);
479        job.setMapperClass(mapperClass);
480        job.setMapOutputKeyClass(ImmutableBytesWritable.class);
481        String hfileOutPath = conf.get(BULK_OUTPUT_CONF_KEY);
482        String[] columns = conf.getStrings(COLUMNS_CONF_KEY);
483        if (StringUtils.isNotEmpty(conf.get(CREDENTIALS_LOCATION))) {
484          String fileLoc = conf.get(CREDENTIALS_LOCATION);
485          Credentials cred = Credentials.readTokenStorageFile(new File(fileLoc), conf);
486          job.getCredentials().addAll(cred);
487        }
488
489        if (hfileOutPath != null) {
490          if (!admin.tableExists(tableName)) {
491            LOG.warn(format("Table '%s' does not exist.", tableName));
492            if ("yes".equalsIgnoreCase(conf.get(CREATE_TABLE_CONF_KEY, "yes"))) {
493              // TODO: this is backwards. Instead of depending on the existence of a table,
494              // create a sane splits file for HFileOutputFormat based on data sampling.
495              createTable(admin, tableName, columns);
496              if (isDryRun) {
497                LOG.warn("Dry run: Table will be deleted at end of dry run.");
498                synchronized (ImportTsv.class) {
499                  DRY_RUN_TABLE_CREATED = true;
500                }
501              }
502            } else {
503              String errorMsg = format("Table '%s' does not exist and '%s' is set to no.",
504                tableName, CREATE_TABLE_CONF_KEY);
505              LOG.error(errorMsg);
506              throw new TableNotFoundException(errorMsg);
507            }
508          }
509          try (Table table = connection.getTable(tableName);
510            RegionLocator regionLocator = connection.getRegionLocator(tableName)) {
511            boolean noStrict = conf.getBoolean(NO_STRICT_COL_FAMILY, false);
            // Unless no.strict is set, verify that every requested column family exists in the
            // table.
513            if (!noStrict) {
514              ArrayList<String> unmatchedFamilies = new ArrayList<>();
515              Set<String> cfSet = getColumnFamilies(columns);
516              TableDescriptor tDesc = table.getDescriptor();
517              for (String cf : cfSet) {
518                if (!tDesc.hasColumnFamily(Bytes.toBytes(cf))) {
519                  unmatchedFamilies.add(cf);
520                }
521              }
              if (!unmatchedFamilies.isEmpty()) {
                ArrayList<String> familyNames = new ArrayList<>();
                for (ColumnFamilyDescriptor family : table.getDescriptor().getColumnFamilies()) {
                  familyNames.add(family.getNameAsString());
                }
                String msg = "Column Families " + unmatchedFamilies + " specified in "
                  + COLUMNS_CONF_KEY + " do not match any of the table " + tableName
                  + " column families " + familyNames + ".\n"
530                  + "To disable column family check, use -D" + NO_STRICT_COL_FAMILY + "=true.\n";
531                usage(msg);
532                System.exit(-1);
533              }
534            }
535            if (mapperClass.equals(TsvImporterTextMapper.class)) {
536              job.setMapOutputValueClass(Text.class);
537              job.setReducerClass(TextSortReducer.class);
538            } else {
539              job.setMapOutputValueClass(Put.class);
540              job.setCombinerClass(PutCombiner.class);
541              job.setReducerClass(PutSortReducer.class);
542            }
543            if (!isDryRun) {
544              Path outputDir = new Path(hfileOutPath);
545              FileOutputFormat.setOutputPath(job, outputDir);
546              HFileOutputFormat2.configureIncrementalLoad(job, table.getDescriptor(),
547                regionLocator);
548            }
549          }
550        } else {
551          if (!admin.tableExists(tableName)) {
552            String errorMsg = format("Table '%s' does not exist.", tableName);
553            LOG.error(errorMsg);
554            throw new TableNotFoundException(errorMsg);
555          }
556          if (mapperClass.equals(TsvImporterTextMapper.class)) {
            usage(TsvImporterTextMapper.class.toString()
              + " should not be used for the non-bulkload case. Use "
              + TsvImporterMapper.class.toString() + " or a custom mapper whose value type is Put.");
560            System.exit(-1);
561          }
562          if (!isDryRun) {
563            // No reducers. Just write straight to table. Call initTableReducerJob
564            // to set up the TableOutputFormat.
565            TableMapReduceUtil.initTableReducerJob(tableName.getNameAsString(), null, job);
566          }
567          job.setNumReduceTasks(0);
568        }
569        if (isDryRun) {
570          job.setOutputFormatClass(NullOutputFormat.class);
571          job.getConfiguration().setStrings("io.serializations",
572            job.getConfiguration().get("io.serializations"), MutationSerialization.class.getName(),
573            ResultSerialization.class.getName(), CellSerialization.class.getName());
574        }
575        TableMapReduceUtil.addDependencyJars(job);
        TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(),
          // Guava is used by TsvParser
          org.apache.hbase.thirdparty.com.google.common.base.Function.class);
581      }
582    }
583    return job;
584  }
585
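  /**
   * Creates the target table with one default-configured column family per family named in the
   * column specification.
   */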
586  private static void createTable(Admin admin, TableName tableName, String[] columns)
587    throws IOException {
588    HTableDescriptor htd = new HTableDescriptor(tableName);
589    Set<String> cfSet = getColumnFamilies(columns);
590    for (String cf : cfSet) {
591      HColumnDescriptor hcd = new HColumnDescriptor(Bytes.toBytes(cf));
592      htd.addFamily(hcd);
593    }
    LOG.warn(format("Creating table '%s' with column families '%s' and default descriptors.",
      tableName, cfSet));
596    admin.createTable(htd);
597  }
598
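  /**
   * Disables (if necessary) and deletes the table that was created during a dry run.
   */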
599  private static void deleteTable(Configuration conf, String[] args) {
600    TableName tableName = TableName.valueOf(args[0]);
601    try (Connection connection = ConnectionFactory.createConnection(conf);
602      Admin admin = connection.getAdmin()) {
603      try {
604        admin.disableTable(tableName);
605      } catch (TableNotEnabledException e) {
        LOG.debug("Dry run: Table " + tableName + " is already disabled, so just deleting it.");
607      }
608      admin.deleteTable(tableName);
609    } catch (IOException e) {
610      LOG.error(format("***Dry run: Failed to delete table '%s'.***%n%s", tableName, e.toString()));
611      return;
612    }
613    LOG.info(format("Dry run: Deleted table '%s'.", tableName));
614  }
615
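  /**
   * Extracts the distinct column family names from the column specification, skipping the special
   * HBASE_* tokens.
   */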
616  private static Set<String> getColumnFamilies(String[] columns) {
617    Set<String> cfSet = new HashSet<>();
618    for (String aColumn : columns) {
619      if (
620        TsvParser.ROWKEY_COLUMN_SPEC.equals(aColumn)
621          || TsvParser.TIMESTAMPKEY_COLUMN_SPEC.equals(aColumn)
622          || TsvParser.CELL_VISIBILITY_COLUMN_SPEC.equals(aColumn)
623          || TsvParser.CELL_TTL_COLUMN_SPEC.equals(aColumn)
624          || TsvParser.ATTRIBUTES_COLUMN_SPEC.equals(aColumn)
625      ) continue;
626      // we are only concerned with the first one (in case this is a cf:cq)
627      cfSet.add(aColumn.split(":", 2)[0]);
628    }
629    return cfSet;
630  }
631
  /**
   * @param errorMsg Error message. Can be null.
   */
635  private static void usage(final String errorMsg) {
636    if (errorMsg != null && errorMsg.length() > 0) {
637      System.err.println("ERROR: " + errorMsg);
638    }
639    String usage = "Usage: " + NAME + " -D" + COLUMNS_CONF_KEY + "=a,b,c <tablename> <inputdir>\n"
640      + "\n" + "Imports the given input directory of TSV data into the specified table.\n" + "\n"
641      + "The column names of the TSV data must be specified using the -D" + COLUMNS_CONF_KEY + "\n"
642      + "option. This option takes the form of comma-separated column names, where each\n"
643      + "column name is either a simple column family, or a columnfamily:qualifier. The special\n"
644      + "column name " + TsvParser.ROWKEY_COLUMN_SPEC
645      + " is used to designate that this column should be used\n"
646      + "as the row key for each imported record. You must specify exactly one column\n"
647      + "to be the row key, and you must specify a column name for every column that exists in the\n"
      + "input data. Another special column " + TsvParser.TIMESTAMPKEY_COLUMN_SPEC
      + " designates that this column should be\n" + "used as the timestamp for each record. Unlike "
650      + TsvParser.ROWKEY_COLUMN_SPEC + ", " + TsvParser.TIMESTAMPKEY_COLUMN_SPEC + " is optional."
651      + "\n" + "You must specify at most one column as timestamp key for each imported record.\n"
      + "Records with invalid timestamps (blank, non-numeric) will be treated as bad records.\n"
653      + "Note: if you use this option, then '" + TIMESTAMP_CONF_KEY + "' option will be ignored.\n"
654      + "\n" + "Other special columns that can be specified are " + TsvParser.CELL_TTL_COLUMN_SPEC
655      + " and " + TsvParser.CELL_VISIBILITY_COLUMN_SPEC + ".\n" + TsvParser.CELL_TTL_COLUMN_SPEC
656      + " designates that this column will be used " + "as a Cell's Time To Live (TTL) attribute.\n"
657      + TsvParser.CELL_VISIBILITY_COLUMN_SPEC + " designates that this column contains the "
658      + "visibility label expression.\n" + "\n" + TsvParser.ATTRIBUTES_COLUMN_SPEC
659      + " can be used to specify Operation Attributes per record.\n"
      + " Should be specified as key=>value where '" + DEFAULT_ATTRIBUTES_SEPERATOR
      + "' separates the key from the value, and multiple attributes are separated by '"
      + DEFAULT_MULTIPLE_ATTRIBUTES_SEPERATOR + "'.\n"
663      + "By default importtsv will load data directly into HBase. To instead generate\n"
664      + "HFiles of data to prepare for a bulk data load, pass the option:\n" + "  -D"
665      + BULK_OUTPUT_CONF_KEY + "=/path/for/output\n"
666      + "  Note: if you do not use this option, then the target table must already exist in HBase\n"
667      + "\n" + "Other options that may be specified with -D include:\n" + "  -D" + DRY_RUN_CONF_KEY
668      + "=true - Dry run mode. Data is not actually populated into"
      + " the table. If the table does not exist, it is created but deleted at the end.\n" + "  -D"
670      + SKIP_LINES_CONF_KEY + "=false - fail if encountering an invalid line\n" + "  -D"
671      + LOG_BAD_LINES_CONF_KEY + "=true - logs invalid lines to stderr\n" + "  -D"
672      + SKIP_EMPTY_COLUMNS + "=false - If true then skip empty columns in bulk import\n" + "  '-D"
673      + SEPARATOR_CONF_KEY + "=|' - eg separate on pipes instead of tabs\n" + "  -D"
674      + TIMESTAMP_CONF_KEY + "=currentTimeAsLong - use the specified timestamp for the import\n"
675      + "  -D" + MAPPER_CONF_KEY + "=my.Mapper - A user-defined Mapper to use instead of "
676      + DEFAULT_MAPPER.getName() + "\n" + "  -D" + JOB_NAME_CONF_KEY
677      + "=jobName - use the specified mapreduce job name for the import\n" + "  -D"
678      + CREATE_TABLE_CONF_KEY + "=no - can be used to avoid creation of table by this tool\n"
679      + "  Note: if you set this to 'no', then the target table must already exist in HBase\n"
680      + "  -D" + NO_STRICT_COL_FAMILY + "=true - ignore column family check in hbase table. "
681      + "Default is false\n\n" + "For performance consider the following options:\n"
682      + "  -Dmapreduce.map.speculative=false\n" + "  -Dmapreduce.reduce.speculative=false";
683
684    System.err.println(usage);
685  }
686
687  @Override
688  public int run(String[] args) throws Exception {
689    if (args.length < 2) {
690      usage("Wrong number of arguments: " + args.length);
691      return -1;
692    }
693
    // When MAPPER_CONF_KEY is null, the user wants to use the provided TsvImporterMapper, so
    // perform validation on these additional args. When it's not null, the user has provided their
    // own mapper, so these validations are not relevant.
697    // TODO: validation for TsvImporterMapper, not this tool. Move elsewhere.
698    if (null == getConf().get(MAPPER_CONF_KEY)) {
699      // Make sure columns are specified
700      String[] columns = getConf().getStrings(COLUMNS_CONF_KEY);
701      if (columns == null) {
702        usage("No columns specified. Please specify with -D" + COLUMNS_CONF_KEY + "=...");
703        return -1;
704      }
705
706      // Make sure they specify exactly one column as the row key
707      int rowkeysFound = 0;
708      for (String col : columns) {
709        if (col.equals(TsvParser.ROWKEY_COLUMN_SPEC)) rowkeysFound++;
710      }
711      if (rowkeysFound != 1) {
712        usage("Must specify exactly one column as " + TsvParser.ROWKEY_COLUMN_SPEC);
713        return -1;
714      }
715
716      // Make sure we have at most one column as the timestamp key
717      int tskeysFound = 0;
718      for (String col : columns) {
719        if (col.equals(TsvParser.TIMESTAMPKEY_COLUMN_SPEC)) tskeysFound++;
720      }
721      if (tskeysFound > 1) {
722        usage("Must specify at most one column as " + TsvParser.TIMESTAMPKEY_COLUMN_SPEC);
723        return -1;
724      }
725
726      int attrKeysFound = 0;
727      for (String col : columns) {
728        if (col.equals(TsvParser.ATTRIBUTES_COLUMN_SPEC)) attrKeysFound++;
729      }
730      if (attrKeysFound > 1) {
731        usage("Must specify at most one column as " + TsvParser.ATTRIBUTES_COLUMN_SPEC);
732        return -1;
733      }
734
735      // Make sure one or more columns are specified excluding rowkey and
736      // timestamp key
737      if (columns.length - (rowkeysFound + tskeysFound + attrKeysFound) < 1) {
738        usage(
739          "One or more columns in addition to the row key and timestamp(optional) are required");
740        return -1;
741      }
742    }
743
    // If the timestamp option is not specified, use the current system time.
    long timestamp = getConf().getLong(TIMESTAMP_CONF_KEY, System.currentTimeMillis());

    // Set it back so that an invalid (non-numeric) timestamp is replaced with the current
    // system time.
    getConf().setLong(TIMESTAMP_CONF_KEY, timestamp);
750
751    synchronized (ImportTsv.class) {
752      DRY_RUN_TABLE_CREATED = false;
753    }
754    Job job = createSubmittableJob(getConf(), args);
755    boolean success = job.waitForCompletion(true);
756    boolean delete = false;
757    synchronized (ImportTsv.class) {
758      delete = DRY_RUN_TABLE_CREATED;
759    }
760    if (delete) {
761      deleteTable(getConf(), args);
762    }
763    return success ? 0 : 1;
764  }
765
766  public static void main(String[] args) throws Exception {
767    int status = ToolRunner.run(HBaseConfiguration.create(), new ImportTsv(), args);
768    System.exit(status);
769  }
770}