/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
18 package org.apache.hadoop.hbase.test;
19
20 import static org.junit.Assert.assertEquals;
21 import static org.junit.Assert.assertTrue;
22
23 import java.io.IOException;
24 import java.security.PrivilegedExceptionAction;
25 import java.util.List;
26
27 import org.apache.commons.cli.CommandLine;
28 import org.apache.hadoop.conf.Configuration;
29 import org.apache.hadoop.fs.Path;
30 import org.apache.hadoop.hbase.HBaseConfiguration;
31 import org.apache.hadoop.hbase.HColumnDescriptor;
32 import org.apache.hadoop.hbase.HConstants;
33 import org.apache.hadoop.hbase.HTableDescriptor;
34 import org.apache.hadoop.hbase.IntegrationTestingUtility;
35 import org.apache.hadoop.hbase.security.visibility.VisibilityTestUtil;
36 import org.apache.hadoop.hbase.testclassification.IntegrationTests;
37 import org.apache.hadoop.hbase.client.Admin;
38 import org.apache.hadoop.hbase.client.HBaseAdmin;
39 import org.apache.hadoop.hbase.client.Put;
40 import org.apache.hadoop.hbase.client.Result;
41 import org.apache.hadoop.hbase.client.Scan;
42 import org.apache.hadoop.hbase.client.ScannerCallable;
43 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
44 import org.apache.hadoop.hbase.io.hfile.HFile;
45 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
46 import org.apache.hadoop.hbase.mapreduce.TableMapper;
47 import org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl;
48 import org.apache.hadoop.hbase.security.User;
49 import org.apache.hadoop.hbase.security.visibility.Authorizations;
50 import org.apache.hadoop.hbase.security.visibility.CellVisibility;
51 import org.apache.hadoop.hbase.security.visibility.VisibilityClient;
52 import org.apache.hadoop.hbase.security.visibility.VisibilityController;
53 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
54 import org.apache.hadoop.hbase.util.Bytes;
55 import org.apache.hadoop.io.BytesWritable;
56 import org.apache.hadoop.io.NullWritable;
57 import org.apache.hadoop.mapreduce.Counter;
58 import org.apache.hadoop.mapreduce.Job;
59 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
60 import org.apache.hadoop.util.ToolRunner;
61 import org.junit.experimental.categories.Category;
62
/**
 * A large-scale load-and-verify test that exercises HBase cell visibility labels.
 * <p>
 * The load phase tags every written cell with one of four visibility expressions and
 * records, via MapReduce counters, how many rows received each expression. The verify
 * phase then scans the table as two different users (each granted a different set of
 * label authorizations) and asserts that each user sees exactly the rows their
 * authorizations entitle them to — and none of the others.
 * <p>
 * Two user names are supplied with the {@code -u usera,userb} option (default
 * {@code user1,user2}); the test creates them and grants the first user the
 * confidential/topsecret/secret/private labels and the second only the public label.
 */
79 @Category(IntegrationTests.class)
80 public class IntegrationTestWithCellVisibilityLoadAndVerify extends IntegrationTestLoadAndVerify {
81 private static final String ERROR_STR =
82 "Two user names are to be specified seperated by a ',' like 'usera,userb'";
83 private static final char NOT = '!';
84 private static final char OR = '|';
85 private static final char AND = '&';
86 private static final String TEST_NAME = "IntegrationTestCellVisibilityLoadAndVerify";
87 private static final String CONFIDENTIAL = "confidential";
88 private static final String TOPSECRET = "topsecret";
89 private static final String SECRET = "secret";
90 private static final String PUBLIC = "public";
91 private static final String PRIVATE = "private";
92 private static final String[] LABELS = { CONFIDENTIAL, TOPSECRET, SECRET, PRIVATE, PUBLIC };
93 private static final String[] VISIBILITY_EXPS = { CONFIDENTIAL + AND + TOPSECRET + AND + PRIVATE,
94 CONFIDENTIAL + OR + TOPSECRET, PUBLIC,
95 '(' + SECRET + OR + PRIVATE + ')' + AND + NOT + CONFIDENTIAL };
96 private static final int VISIBILITY_EXPS_COUNT = VISIBILITY_EXPS.length;
97 private static final byte[] TEST_FAMILY = Bytes.toBytes("f1");
98 private static final byte[] TEST_QUALIFIER = Bytes.toBytes("q1");
99 private static final String NUM_TO_WRITE_KEY = "loadmapper.num_to_write";
100 private static final long NUM_TO_WRITE_DEFAULT = 100 * 1000;
101 private static final int SCANNER_CACHING = 500;
102 private static String USER_OPT = "users";
103 private static String userNames = "user1,user2";
104
105 private long numRowsLoadedWithExp1, numRowsLoadedWithExp2, numRowsLoadWithExp3,
106 numRowsLoadWithExp4;
107 private long numRowsReadWithExp1, numRowsReadWithExp2, numRowsReadWithExp3, numRowsReadWithExp4;
108
109 private static User USER1, USER2;
110
111 private enum Counters {
112 ROWS_VIS_EXP_1, ROWS_VIS_EXP_2, ROWS_VIS_EXP_3, ROWS_VIS_EXP_4;
113 }
114
115 @Override
116 public void setUpCluster() throws Exception {
117 util = getTestingUtil(null);
118 Configuration conf = util.getConfiguration();
119 VisibilityTestUtil.enableVisiblityLabels(conf);
120 conf.set("hbase.superuser", User.getCurrent().getName());
121 conf.setBoolean("dfs.permissions", false);
122 super.setUpCluster();
123 String[] users = userNames.split(",");
124 if (users.length != 2) {
125 System.err.println(ERROR_STR);
126 throw new IOException(ERROR_STR);
127 }
128 System.out.println(userNames + " "+users[0]+ " "+users[1]);
129 USER1 = User.createUserForTesting(conf, users[0], new String[] {});
130 USER2 = User.createUserForTesting(conf, users[1], new String[] {});
131 addLabelsAndAuths();
132 }
133
134 @Override
135 protected void addOptions() {
136 super.addOptions();
137 addOptWithArg("u", USER_OPT, "User names to be passed");
138 }
139
140 private void addLabelsAndAuths() throws Exception {
141 try {
142 VisibilityClient.addLabels(util.getConnection(), LABELS);
143 VisibilityClient.setAuths(util.getConnection(), new String[] { CONFIDENTIAL, TOPSECRET,
144 SECRET, PRIVATE }, USER1.getName());
145 VisibilityClient.setAuths(util.getConnection(), new String[] { PUBLIC },
146 USER2.getName());
147 } catch (Throwable t) {
148 throw new IOException(t);
149 }
150 }
151
152 public static class LoadWithCellVisibilityMapper extends LoadMapper {
153 private Counter rowsExp1, rowsExp2, rowsExp3, rowsexp4;
154
155 @Override
156 public void setup(Context context) throws IOException {
157 super.setup(context);
158 rowsExp1 = context.getCounter(Counters.ROWS_VIS_EXP_1);
159 rowsExp2 = context.getCounter(Counters.ROWS_VIS_EXP_2);
160 rowsExp3 = context.getCounter(Counters.ROWS_VIS_EXP_3);
161 rowsexp4 = context.getCounter(Counters.ROWS_VIS_EXP_4);
162 }
163
164 @Override
165 protected void map(NullWritable key, NullWritable value, Context context) throws IOException,
166 InterruptedException {
167 String suffix = "/" + shortTaskId;
168 int BLOCK_SIZE = (int) (recordsToWrite / 100);
169 for (long i = 0; i < recordsToWrite;) {
170 for (long idx = 0; idx < BLOCK_SIZE && i < recordsToWrite; idx++, i++) {
171 int expIdx = rand.nextInt(BLOCK_SIZE) % VISIBILITY_EXPS_COUNT;
172 String exp = VISIBILITY_EXPS[expIdx];
173 byte[] row = Bytes.add(Bytes.toBytes(i), Bytes.toBytes(suffix), Bytes.toBytes(exp));
174 Put p = new Put(row);
175 p.add(TEST_FAMILY, TEST_QUALIFIER, HConstants.EMPTY_BYTE_ARRAY);
176 p.setCellVisibility(new CellVisibility(exp));
177 getCounter(expIdx).increment(1);
178 mutator.mutate(p);
179
180 if (i % 100 == 0) {
181 context.setStatus("Written " + i + "/" + recordsToWrite + " records");
182 context.progress();
183 }
184 }
185
186
187 mutator.flush();
188 }
189 }
190
191 private Counter getCounter(int idx) {
192 switch (idx) {
193 case 0:
194 return rowsExp1;
195 case 1:
196 return rowsExp2;
197 case 2:
198 return rowsExp3;
199 case 3:
200 return rowsexp4;
201 default:
202 return null;
203 }
204 }
205 }
206
207 public static class VerifyMapper extends TableMapper<BytesWritable, BytesWritable> {
208 private Counter rowsExp1, rowsExp2, rowsExp3, rowsExp4;
209
210 @Override
211 public void setup(Context context) throws IOException {
212 rowsExp1 = context.getCounter(Counters.ROWS_VIS_EXP_1);
213 rowsExp2 = context.getCounter(Counters.ROWS_VIS_EXP_2);
214 rowsExp3 = context.getCounter(Counters.ROWS_VIS_EXP_3);
215 rowsExp4 = context.getCounter(Counters.ROWS_VIS_EXP_4);
216 }
217
218 @Override
219 protected void map(ImmutableBytesWritable key, Result value, Context context)
220 throws IOException, InterruptedException {
221 byte[] row = value.getRow();
222 Counter c = getCounter(row);
223 c.increment(1);
224 }
225
226 private Counter getCounter(byte[] row) {
227 Counter c = null;
228 if (Bytes.indexOf(row, Bytes.toBytes(VISIBILITY_EXPS[0])) != -1) {
229 c = rowsExp1;
230 } else if (Bytes.indexOf(row, Bytes.toBytes(VISIBILITY_EXPS[1])) != -1) {
231 c = rowsExp2;
232 } else if (Bytes.indexOf(row, Bytes.toBytes(VISIBILITY_EXPS[2])) != -1) {
233 c = rowsExp3;
234 } else if (Bytes.indexOf(row, Bytes.toBytes(VISIBILITY_EXPS[3])) != -1) {
235 c = rowsExp4;
236 }
237 return c;
238 }
239 }
240
241 @Override
242 protected Job doLoad(Configuration conf, HTableDescriptor htd) throws Exception {
243 Job job = super.doLoad(conf, htd);
244 this.numRowsLoadedWithExp1 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_1).getValue();
245 this.numRowsLoadedWithExp2 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_2).getValue();
246 this.numRowsLoadWithExp3 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_3).getValue();
247 this.numRowsLoadWithExp4 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_4).getValue();
248 System.out.println("Rows loaded with cell visibility " + VISIBILITY_EXPS[0] + " : "
249 + this.numRowsLoadedWithExp1);
250 System.out.println("Rows loaded with cell visibility " + VISIBILITY_EXPS[1] + " : "
251 + this.numRowsLoadedWithExp2);
252 System.out.println("Rows loaded with cell visibility " + VISIBILITY_EXPS[2] + " : "
253 + this.numRowsLoadWithExp3);
254 System.out.println("Rows loaded with cell visibility " + VISIBILITY_EXPS[3] + " : "
255 + this.numRowsLoadWithExp4);
256 return job;
257 }
258
259 @Override
260 protected void setMapperClass(Job job) {
261 job.setMapperClass(LoadWithCellVisibilityMapper.class);
262 }
263
264 @Override
265 protected void doVerify(final Configuration conf, final HTableDescriptor htd) throws Exception {
266 System.out.println(String.format("Verifying for auths %s, %s, %s, %s", CONFIDENTIAL, TOPSECRET,
267 SECRET, PRIVATE));
268 PrivilegedExceptionAction<Job> scanAction = new PrivilegedExceptionAction<Job>() {
269 @Override
270 public Job run() throws Exception {
271 return doVerify(conf, htd, CONFIDENTIAL, TOPSECRET, SECRET, PRIVATE);
272 }
273 };
274 Job job = USER1.runAs(scanAction);
275 this.numRowsReadWithExp1 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_1).getValue();
276 this.numRowsReadWithExp2 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_2).getValue();
277 this.numRowsReadWithExp3 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_3).getValue();
278 this.numRowsReadWithExp4 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_4).getValue();
279 assertEquals(this.numRowsLoadedWithExp1, this.numRowsReadWithExp1);
280 assertEquals(this.numRowsLoadedWithExp2, this.numRowsReadWithExp2);
281 assertEquals(0, this.numRowsReadWithExp3);
282 assertEquals(0, this.numRowsReadWithExp4);
283
284
285 System.out.println(String.format("Verifying for auths %s, %s", PRIVATE, PUBLIC));
286 scanAction = new PrivilegedExceptionAction<Job>() {
287 @Override
288 public Job run() throws Exception {
289 return doVerify(conf, htd, PRIVATE, PUBLIC);
290 }
291 };
292 job = USER1.runAs(scanAction);
293 this.numRowsReadWithExp1 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_1).getValue();
294 this.numRowsReadWithExp2 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_2).getValue();
295 this.numRowsReadWithExp3 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_3).getValue();
296 this.numRowsReadWithExp4 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_4).getValue();
297 assertEquals(0, this.numRowsReadWithExp1);
298 assertEquals(0, this.numRowsReadWithExp2);
299 assertEquals(0, this.numRowsReadWithExp3);
300 assertEquals(this.numRowsLoadWithExp4, this.numRowsReadWithExp4);
301
302
303 System.out.println(String.format("Verifying for auths %s, %s", PRIVATE, PUBLIC));
304 scanAction = new PrivilegedExceptionAction<Job>() {
305 @Override
306 public Job run() throws Exception {
307 return doVerify(conf, htd, PRIVATE, PUBLIC);
308 }
309 };
310 job = USER2.runAs(scanAction);
311 this.numRowsReadWithExp1 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_1).getValue();
312 this.numRowsReadWithExp2 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_2).getValue();
313 this.numRowsReadWithExp3 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_3).getValue();
314 this.numRowsReadWithExp4 = job.getCounters().findCounter(Counters.ROWS_VIS_EXP_4).getValue();
315 assertEquals(0, this.numRowsReadWithExp1);
316 assertEquals(0, this.numRowsReadWithExp2);
317 assertEquals(this.numRowsLoadWithExp3, this.numRowsReadWithExp3);
318 assertEquals(0, this.numRowsReadWithExp4);
319 }
320
321 private Job doVerify(Configuration conf, HTableDescriptor htd, String... auths)
322 throws IOException, InterruptedException, ClassNotFoundException {
323 Path outputDir = getTestDir(TEST_NAME, "verify-output");
324 Job job = new Job(conf);
325 job.setJarByClass(this.getClass());
326 job.setJobName(TEST_NAME + " Verification for " + htd.getTableName());
327 setJobScannerConf(job);
328 Scan scan = new Scan();
329 scan.setAuthorizations(new Authorizations(auths));
330 TableMapReduceUtil.initTableMapperJob(htd.getTableName().getNameAsString(), scan,
331 VerifyMapper.class, NullWritable.class, NullWritable.class, job);
332 TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), AbstractHBaseTool.class);
333 int scannerCaching = conf.getInt("verify.scannercaching", SCANNER_CACHING);
334 TableMapReduceUtil.setScannerCaching(job, scannerCaching);
335 job.setNumReduceTasks(0);
336 FileOutputFormat.setOutputPath(job, outputDir);
337 assertTrue(job.waitForCompletion(true));
338 return job;
339 }
340
341 private static void setJobScannerConf(Job job) {
342 job.getConfiguration().setBoolean(ScannerCallable.LOG_SCANNER_ACTIVITY, true);
343 long lpr = job.getConfiguration().getLong(NUM_TO_WRITE_KEY, NUM_TO_WRITE_DEFAULT) / 100;
344 job.getConfiguration().setInt(TableRecordReaderImpl.LOG_PER_ROW_COUNT, (int) lpr);
345 }
346
347 @Override
348 public void printUsage() {
349 System.err.println(this.getClass().getSimpleName() + " -u usera,userb [-Doptions]");
350 System.err.println(" Loads a table with cell visibilities and verifies with Authorizations");
351 System.err.println("Options");
352 System.err
353 .println(" -Dloadmapper.table=<name> Table to write/verify (default autogen)");
354 System.err.println(" -Dloadmapper.num_to_write=<n> "
355 + "Number of rows per mapper (default 100,000 per mapper)");
356 System.err.println(" -Dloadmapper.numPresplits=<n> "
357 + "Number of presplit regions to start with (default 40)");
358 System.err
359 .println(" -Dloadmapper.map.tasks=<n> Number of map tasks for load (default 200)");
360 System.err.println(" -Dverify.scannercaching=<n> "
361 + "Number hbase scanner caching rows to read (default 50)");
362 }
363
364 @Override
365 public int runTestFromCommandLine() throws Exception {
366 IntegrationTestingUtility.setUseDistributedCluster(getConf());
367 int numPresplits = getConf().getInt("loadmapper.numPresplits", 5);
368
369 HTableDescriptor htd = new HTableDescriptor(getTablename());
370 htd.addFamily(new HColumnDescriptor(TEST_FAMILY));
371
372 Admin admin = new HBaseAdmin(getConf());
373 try {
374 admin.createTable(htd, Bytes.toBytes(0L), Bytes.toBytes(-1L), numPresplits);
375 } finally {
376 admin.close();
377 }
378 doLoad(getConf(), htd);
379 doVerify(getConf(), htd);
380 getTestingUtil(getConf()).deleteTable(htd.getName());
381 return 0;
382 }
383
384 @Override
385 protected void processOptions(CommandLine cmd) {
386 List args = cmd.getArgList();
387 if (args.size() > 0) {
388 printUsage();
389 throw new RuntimeException("No args expected.");
390 }
391
392 args.add("loadAndVerify");
393 if (cmd.hasOption(USER_OPT)) {
394 userNames = cmd.getOptionValue(USER_OPT);
395 }
396 super.processOptions(cmd);
397 }
398
399 public static void main(String argv[]) throws Exception {
400 Configuration conf = HBaseConfiguration.create();
401 IntegrationTestingUtility.setUseDistributedCluster(conf);
402 int ret = ToolRunner.run(conf, new IntegrationTestWithCellVisibilityLoadAndVerify(), argv);
403 System.exit(ret);
404 }
405 }