/*
 * Copyright 2009 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.thrift;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.filter.ParseFilter;
import org.apache.hadoop.hbase.thrift.ThriftServerRunner.HBaseHandler;
import org.apache.hadoop.hbase.thrift.generated.BatchMutation;
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
import org.apache.hadoop.hbase.thrift.generated.Hbase;
import org.apache.hadoop.hbase.thrift.generated.Mutation;
import org.apache.hadoop.hbase.thrift.generated.TCell;
import org.apache.hadoop.hbase.thrift.generated.TIncrement;
import org.apache.hadoop.hbase.thrift.generated.TRegionInfo;
import org.apache.hadoop.hbase.thrift.generated.TRowResult;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.metrics.ContextFactory;
import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsUtil;
import org.apache.hadoop.metrics.spi.NoEmitMetricsContext;
import org.apache.hadoop.metrics.spi.OutputRecord;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Unit testing for ThriftServerRunner.HBaseHandler, a part of the
 * org.apache.hadoop.hbase.thrift package.
 */
@Category(MediumTests.class)
public class TestThriftServer {
  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
  private static final Log LOG = LogFactory.getLog(TestThriftServer.class);
  protected static final int MAXVERSIONS = 3;

  private static ByteBuffer asByteBuffer(String i) {
    return ByteBuffer.wrap(Bytes.toBytes(i));
  }

  private static ByteBuffer asByteBuffer(long l) {
    return ByteBuffer.wrap(Bytes.toBytes(l));
  }

  // Static names for tables, columns, rows, and values
  private static ByteBuffer tableAname = asByteBuffer("tableA");
  private static ByteBuffer tableBname = asByteBuffer("tableB");
  private static ByteBuffer columnAname = asByteBuffer("columnA:");
  private static ByteBuffer columnAAname = asByteBuffer("columnA:A");
  private static ByteBuffer columnBname = asByteBuffer("columnB:");
  private static ByteBuffer rowAname = asByteBuffer("rowA");
  private static ByteBuffer rowBname = asByteBuffer("rowB");
  private static ByteBuffer valueAname = asByteBuffer("valueA");
  private static ByteBuffer valueBname = asByteBuffer("valueB");
  private static ByteBuffer valueCname = asByteBuffer("valueC");
  private static ByteBuffer valueDname = asByteBuffer("valueD");
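  // valueEname wraps the byte encoding of the long value 100; the increment
  // tests below use it to seed their counters before applying increments.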
  private static ByteBuffer valueEname = asByteBuffer(100L);

  @BeforeClass
  public static void beforeClass() throws Exception {
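    // Enable increment coalescing so that the Thrift handler funnels
    // increments through its coalescer (see doTestIncrements()).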
    UTIL.getConfiguration().setBoolean(ThriftServerRunner.COALESCE_INC_KEY, true);
    UTIL.startMiniCluster();
  }

  @AfterClass
  public static void afterClass() throws Exception {
    UTIL.shutdownMiniCluster();
  }

  /**
   * Runs all of the tests under a single JUnit test method.  We
   * consolidate all testing to one method because HBaseClusterTestCase
   * is prone to OutOfMemoryErrors when there are three or more
   * JUnit test methods.
   *
   * @throws Exception
   */
  @Test
  public void testAll() throws Exception {
    // Run all tests
    doTestTableCreateDrop();
    doTestThriftMetrics();
    doTestTableMutations();
    doTestTableTimestampsAndColumns();
    doTestTableScanners();
    doTestGetTableRegions();
    doTestFilterRegistration();
  }

  /**
   * Tests for creating, enabling, disabling, and deleting tables.
   *
   * @throws Exception
   */
  public void doTestTableCreateDrop() throws Exception {
    ThriftServerRunner.HBaseHandler handler =
      new ThriftServerRunner.HBaseHandler(UTIL.getConfiguration());
    doTestTableCreateDrop(handler);
  }

  public static void doTestTableCreateDrop(Hbase.Iface handler) throws Exception {
    createTestTables(handler);
    dropTestTables(handler);
  }

  /**
   * Tests that the metrics for the Thrift handler are updated correctly.
   */
  public void doTestThriftMetrics() throws Exception {
    Configuration conf = UTIL.getConfiguration();
    ThriftMetrics metrics = getMetrics(conf);
    Hbase.Iface handler = getHandler(metrics, conf);
    createTestTables(handler);
    dropTestTables(handler);
    verifyMetrics(metrics, "createTable_num_ops", 2);
    verifyMetrics(metrics, "deleteTable_num_ops", 2);
    verifyMetrics(metrics, "disableTable_num_ops", 2);
  }

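  /**
   * Wraps a plain HBaseHandler in HbaseHandlerMetricsProxy so that every
   * Hbase.Iface call made through the returned handler is recorded in the
   * supplied ThriftMetrics (e.g. the *_num_ops counters checked above).
   */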
  private static Hbase.Iface getHandler(ThriftMetrics metrics, Configuration conf)
      throws Exception {
    Hbase.Iface handler = new ThriftServerRunner.HBaseHandler(conf);
    return HbaseHandlerMetricsProxy.newInstance(handler, metrics, conf);
  }

  private static ThriftMetrics getMetrics(Configuration conf) throws Exception {
    setupMetricsContext();
    return new ThriftMetrics(ThriftServerRunner.DEFAULT_LISTEN_PORT, conf, Hbase.Iface.class);
  }

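  /**
   * Points the Thrift metrics context at NoEmitMetricsContext, which keeps
   * metric records in memory instead of emitting them, so the test can read
   * them back through MetricsUtil, and clears any record left over from a
   * previous run.
   */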
  private static void setupMetricsContext() throws IOException {
    ContextFactory factory = ContextFactory.getFactory();
    factory.setAttribute(ThriftMetrics.CONTEXT_NAME + ".class",
        NoEmitMetricsContext.class.getName());
    MetricsUtil.getContext(ThriftMetrics.CONTEXT_NAME)
               .createRecord(ThriftMetrics.CONTEXT_NAME).remove();
  }

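  /**
   * Pushes any pending metric updates into the in-memory metrics context and
   * asserts that the named metric in the first output record has the expected
   * value.
   */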
  private static void verifyMetrics(ThriftMetrics metrics, String name, int expectValue)
      throws Exception {
    MetricsContext context = MetricsUtil.getContext(ThriftMetrics.CONTEXT_NAME);
    metrics.doUpdates(context);
    OutputRecord record =
        context.getAllRecords().get(ThriftMetrics.CONTEXT_NAME).iterator().next();
    assertEquals(expectValue, record.getMetric(name).intValue());
  }

  public static void createTestTables(Hbase.Iface handler) throws Exception {
    // Create/enable/disable/delete tables, ensure methods act correctly
    assertEquals(0, handler.getTableNames().size());
    handler.createTable(tableAname, getColumnDescriptors());
    assertEquals(1, handler.getTableNames().size());
    assertEquals(2, handler.getColumnDescriptors(tableAname).size());
    assertTrue(handler.isTableEnabled(tableAname));
    handler.createTable(tableBname, new ArrayList<ColumnDescriptor>());
    assertEquals(2, handler.getTableNames().size());
  }

  public static void dropTestTables(Hbase.Iface handler) throws Exception {
    handler.disableTable(tableBname);
    assertFalse(handler.isTableEnabled(tableBname));
    handler.deleteTable(tableBname);
    assertEquals(1, handler.getTableNames().size());
    handler.disableTable(tableAname);
    /* TODO Reenable.
    assertFalse(handler.isTableEnabled(tableAname));
    handler.enableTable(tableAname);
    assertTrue(handler.isTableEnabled(tableAname));
    handler.disableTable(tableAname);*/
    handler.deleteTable(tableAname);
  }

  public void doTestIncrements() throws Exception {
    ThriftServerRunner.HBaseHandler handler =
        new ThriftServerRunner.HBaseHandler(UTIL.getConfiguration());
    createTestTables(handler);
    doTestIncrements(handler);
    dropTestTables(handler);
  }

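  /**
   * Exercises single increments and batched increments through the handler.
   * Both rows are seeded with the long value 100 (valueEname), and the final
   * counter values are verified after the coalescer has had time to flush.
   */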
  public static void doTestIncrements(HBaseHandler handler) throws Exception {
    List<Mutation> mutations = new ArrayList<Mutation>(1);
    mutations.add(new Mutation(false, columnAAname, valueEname, true));
    mutations.add(new Mutation(false, columnAname, valueEname, true));
    handler.mutateRow(tableAname, rowAname, mutations, null);
    handler.mutateRow(tableAname, rowBname, mutations, null);

    List<TIncrement> increments = new ArrayList<TIncrement>();
    increments.add(new TIncrement(tableAname, rowBname, columnAAname, 7));
    increments.add(new TIncrement(tableAname, rowBname, columnAAname, 7));
    increments.add(new TIncrement(tableAname, rowBname, columnAAname, 7));

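    // Expected totals: rowA/columnA gains 2 per iteration on top of the seed
    // of 100; rowB/columnA:A gains 3 * 7 per iteration via incrementRows().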
    int numIncrements = 60000;
    for (int i = 0; i < numIncrements; i++) {
      handler.increment(new TIncrement(tableAname, rowAname, columnAname, 2));
      handler.incrementRows(increments);
    }

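    // Give the coalescer's background threads a moment to apply any queued
    // increments before reading the counters back.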
    Thread.sleep(1000);
    long lv = handler.get(tableAname, rowAname, columnAname, null).get(0).value.getLong();
    assertEquals((100 + (2 * numIncrements)), lv);

    lv = handler.get(tableAname, rowBname, columnAAname, null).get(0).value.getLong();
    assertEquals((100 + (3 * 7 * numIncrements)), lv);

    assertTrue(handler.coalescer.getSuccessfulCoalescings() > 0);
  }

  /**
   * Tests adding a series of Mutations and BatchMutations, including a
   * delete mutation.  Also tests data retrieval, and getting back multiple
   * versions.
   *
   * @throws Exception
   */
  public void doTestTableMutations() throws Exception {
    ThriftServerRunner.HBaseHandler handler =
      new ThriftServerRunner.HBaseHandler(UTIL.getConfiguration());
    doTestTableMutations(handler);
  }

  public static void doTestTableMutations(Hbase.Iface handler) throws Exception {
    // Setup
    handler.createTable(tableAname, getColumnDescriptors());

    // Apply a few Mutations to rowA
    //   mutations.add(new Mutation(false, columnAname, valueAname));
    //   mutations.add(new Mutation(false, columnBname, valueBname));
    handler.mutateRow(tableAname, rowAname, getMutations(), null);

    // Assert that the changes were made
    assertEquals(valueAname,
      handler.get(tableAname, rowAname, columnAname, null).get(0).value);
    TRowResult rowResult1 = handler.getRow(tableAname, rowAname, null).get(0);
    assertEquals(rowAname, rowResult1.row);
    assertEquals(valueBname,
      rowResult1.columns.get(columnBname).value);

    // Apply a few BatchMutations for rowA and rowB
    // rowAmutations.add(new Mutation(true, columnAname, null));
    // rowAmutations.add(new Mutation(false, columnBname, valueCname));
    // batchMutations.add(new BatchMutation(rowAname, rowAmutations));
    // Mutations to rowB
    // rowBmutations.add(new Mutation(false, columnAname, valueCname));
    // rowBmutations.add(new Mutation(false, columnBname, valueDname));
    // batchMutations.add(new BatchMutation(rowBname, rowBmutations));
    handler.mutateRows(tableAname, getBatchMutations(), null);

    // Assert that the changes were made to rowA: columnA was deleted by the
    // batch mutation, and columnB now has valueC with valueB as an older version
    List<TCell> cells = handler.get(tableAname, rowAname, columnAname, null);
    assertEquals(0, cells.size());
    assertEquals(valueCname, handler.get(tableAname, rowAname, columnBname, null).get(0).value);
    List<TCell> versions = handler.getVer(tableAname, rowAname, columnBname, MAXVERSIONS, null);
    assertEquals(valueCname, versions.get(0).value);
    assertEquals(valueBname, versions.get(1).value);

    // Assert that changes were made to rowB
    TRowResult rowResult2 = handler.getRow(tableAname, rowBname, null).get(0);
    assertEquals(rowBname, rowResult2.row);
    assertEquals(valueCname, rowResult2.columns.get(columnAname).value);
    assertEquals(valueDname, rowResult2.columns.get(columnBname).value);

    // Apply some deletes
    handler.deleteAll(tableAname, rowAname, columnBname, null);
    handler.deleteAllRow(tableAname, rowBname, null);

    // Assert that the deletes were applied
    int size = handler.get(tableAname, rowAname, columnBname, null).size();
    assertEquals(0, size);
    size = handler.getRow(tableAname, rowBname, null).size();
    assertEquals(0, size);

    // Try null mutation
    List<Mutation> mutations = new ArrayList<Mutation>();
    mutations.add(new Mutation(false, columnAname, null, true));
    handler.mutateRow(tableAname, rowAname, mutations, null);
    TRowResult rowResult3 = handler.getRow(tableAname, rowAname, null).get(0);
    assertEquals(rowAname, rowResult3.row);
    assertEquals(0, rowResult3.columns.get(columnAname).value.remaining());

    // Teardown
    handler.disableTable(tableAname);
    handler.deleteTable(tableAname);
  }

  /**
   * Similar to doTestTableMutations(), except Mutations are applied with
   * specific timestamps and data retrieval uses these timestamps to
   * extract specific versions of data.
   *
   * @throws Exception
   */
  public void doTestTableTimestampsAndColumns() throws Exception {
    // Setup
    ThriftServerRunner.HBaseHandler handler =
      new ThriftServerRunner.HBaseHandler(UTIL.getConfiguration());
    handler.createTable(tableAname, getColumnDescriptors());

    // Apply timestamped Mutations to rowA
    long time1 = System.currentTimeMillis();
    handler.mutateRowTs(tableAname, rowAname, getMutations(), time1, null);

    Thread.sleep(1000);

    // Apply timestamped BatchMutations for rowA and rowB
    long time2 = System.currentTimeMillis();
    handler.mutateRowsTs(tableAname, getBatchMutations(), time2, null);

    // Apply an overlapping timestamped mutation to rowB
    handler.mutateRowTs(tableAname, rowBname, getMutations(), time2, null);

    // getVerTs() only returns cells with timestamps strictly less than the
    // passed timestamp, so bump time1 and time2 past the write timestamps to
    // make the cells written above visible.
    time1 += 1;
    time2 += 2;

    // Assert that the timestamp-related methods retrieve the correct data
    assertEquals(2, handler.getVerTs(tableAname, rowAname, columnBname, time2,
      MAXVERSIONS, null).size());
    assertEquals(1, handler.getVerTs(tableAname, rowAname, columnBname, time1,
      MAXVERSIONS, null).size());

    TRowResult rowResult1 = handler.getRowTs(tableAname, rowAname, time1, null).get(0);
    TRowResult rowResult2 = handler.getRowTs(tableAname, rowAname, time2, null).get(0);
    // columnA was completely deleted
    //assertTrue(Bytes.equals(rowResult1.columns.get(columnAname).value, valueAname));
    assertEquals(valueBname, rowResult1.columns.get(columnBname).value);
    assertEquals(valueCname, rowResult2.columns.get(columnBname).value);

    // ColumnAname has been deleted, and will never be visible even with a getRowTs()
    assertFalse(rowResult2.columns.containsKey(columnAname));

    List<ByteBuffer> columns = new ArrayList<ByteBuffer>();
    columns.add(columnBname);

    rowResult1 = handler.getRowWithColumns(tableAname, rowAname, columns, null).get(0);
    assertEquals(valueCname, rowResult1.columns.get(columnBname).value);
    assertFalse(rowResult1.columns.containsKey(columnAname));

    rowResult1 = handler.getRowWithColumnsTs(tableAname, rowAname, columns, time1, null).get(0);
    assertEquals(valueBname, rowResult1.columns.get(columnBname).value);
    assertFalse(rowResult1.columns.containsKey(columnAname));

    // Apply some timestamped deletes.
    // deleteAllTs removes every version of columnB in rowA with a timestamp
    // at or before time1 (the time2 version survives), and deleteAllRowTs
    // removes all of rowB at or before time2.
    handler.deleteAllTs(tableAname, rowAname, columnBname, time1, null);
    handler.deleteAllRowTs(tableAname, rowBname, time2, null);

    // Assert that the timestamp-related methods retrieve the correct data
    int size = handler.getVerTs(tableAname, rowAname, columnBname, time1, MAXVERSIONS, null).size();
    assertEquals(0, size);

    size = handler.getVerTs(tableAname, rowAname, columnBname, time2, MAXVERSIONS, null).size();
    assertEquals(1, size);

    // The time2 version of columnB should still be available
    assertEquals(valueCname, handler.get(tableAname, rowAname, columnBname, null).get(0).value);

    assertEquals(0, handler.getRow(tableAname, rowBname, null).size());

    // Teardown
    handler.disableTable(tableAname);
    handler.deleteTable(tableAname);
  }

  /**
   * Tests the four different scanner-opening methods (with and without
   * a stoprow, with and without a timestamp).
   *
   * @throws Exception
   */
  public void doTestTableScanners() throws Exception {
    // Setup
    ThriftServerRunner.HBaseHandler handler =
      new ThriftServerRunner.HBaseHandler(UTIL.getConfiguration());
    handler.createTable(tableAname, getColumnDescriptors());

    // Apply timestamped Mutations to rowA
    long time1 = System.currentTimeMillis();
    handler.mutateRowTs(tableAname, rowAname, getMutations(), time1, null);

    // Sleep to ensure that 'time1' and 'time2' will be different even with a
    // coarse grained system timer.
    Thread.sleep(1000);

    // Apply timestamped BatchMutations for rowA and rowB
    long time2 = System.currentTimeMillis();
    handler.mutateRowsTs(tableAname, getBatchMutations(), time2, null);

    // Bump time1 so timestamped scans include the cells written at time1
    time1 += 1;

    // Test a scanner on all rows and all columns, no timestamp
    int scanner1 = handler.scannerOpen(tableAname, rowAname, getColumnList(true, true), null);
    TRowResult rowResult1a = handler.scannerGet(scanner1).get(0);
    assertEquals(rowAname, rowResult1a.row);
    // This used to be '1'.  I don't know why when we are asking for two columns
    // and when the mutations above would seem to add two columns to the row.
    // -- St.Ack 05/12/2009
    assertEquals(1, rowResult1a.columns.size());
    assertEquals(valueCname, rowResult1a.columns.get(columnBname).value);

    TRowResult rowResult1b = handler.scannerGet(scanner1).get(0);
    assertEquals(rowBname, rowResult1b.row);
    assertEquals(2, rowResult1b.columns.size());
    assertEquals(valueCname, rowResult1b.columns.get(columnAname).value);
    assertEquals(valueDname, rowResult1b.columns.get(columnBname).value);
    closeScanner(scanner1, handler);

    // Test a scanner on all rows and all columns, with timestamp
    int scanner2 = handler.scannerOpenTs(tableAname, rowAname, getColumnList(true, true), time1, null);
    TRowResult rowResult2a = handler.scannerGet(scanner2).get(0);
    assertEquals(1, rowResult2a.columns.size());
    // column A deleted, does not exist.
    //assertTrue(Bytes.equals(rowResult2a.columns.get(columnAname).value, valueAname));
    assertEquals(valueBname, rowResult2a.columns.get(columnBname).value);
    closeScanner(scanner2, handler);

    // Test a scanner on the first row and first column only, no timestamp
    int scanner3 = handler.scannerOpenWithStop(tableAname, rowAname, rowBname,
        getColumnList(true, false), null);
    closeScanner(scanner3, handler);

    // Test a scanner on the first row and second column only, with timestamp
    int scanner4 = handler.scannerOpenWithStopTs(tableAname, rowAname, rowBname,
        getColumnList(false, true), time1, null);
    TRowResult rowResult4a = handler.scannerGet(scanner4).get(0);
    assertEquals(1, rowResult4a.columns.size());
    assertEquals(valueBname, rowResult4a.columns.get(columnBname).value);
    closeScanner(scanner4, handler);

    // Teardown
    handler.disableTable(tableAname);
    handler.deleteTable(tableAname);
  }

  /**
   * For HBASE-2556
   * Tests for GetTableRegions
   *
   * @throws Exception
   */
  public void doTestGetTableRegions() throws Exception {
    ThriftServerRunner.HBaseHandler handler =
      new ThriftServerRunner.HBaseHandler(UTIL.getConfiguration());
    doTestGetTableRegions(handler);
  }

  public static void doTestGetTableRegions(Hbase.Iface handler)
      throws Exception {
    assertEquals(0, handler.getTableNames().size());
    handler.createTable(tableAname, getColumnDescriptors());
    assertEquals(1, handler.getTableNames().size());
    List<TRegionInfo> regions = handler.getTableRegions(tableAname);
    int regionCount = regions.size();
    assertEquals("empty table should have only 1 region, " +
        "but found " + regionCount, 1, regionCount);
    LOG.info("Region found: " + regions.get(0));
    handler.disableTable(tableAname);
    handler.deleteTable(tableAname);
    regionCount = handler.getTableRegions(tableAname).size();
    assertEquals("non-existing table should have 0 regions, " +
        "but found " + regionCount, 0, regionCount);
  }

  public void doTestFilterRegistration() throws Exception {
    Configuration conf = UTIL.getConfiguration();

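    // registerFilters() reads "hbase.thrift.filters" as filterName:filterClass
    // pairs; "filterclass" is just a placeholder string here, since the test
    // only checks that the mapping is recorded in ParseFilter.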
    conf.set("hbase.thrift.filters", "MyFilter:filterclass");

    ThriftServerRunner.registerFilters(conf);

    Map<String, String> registeredFilters = ParseFilter.getAllFilters();

    assertEquals("filterclass", registeredFilters.get("MyFilter"));
  }

  /**
   * @return a List of ColumnDescriptors for use in creating a table.  Has one
   * default ColumnDescriptor and one ColumnDescriptor with fewer versions
   */
  private static List<ColumnDescriptor> getColumnDescriptors() {
    ArrayList<ColumnDescriptor> cDescriptors = new ArrayList<ColumnDescriptor>();

    // A default ColumnDescriptor
    ColumnDescriptor cDescA = new ColumnDescriptor();
    cDescA.name = columnAname;
    cDescriptors.add(cDescA);

    // A slightly customized ColumnDescriptor (only 2 versions)
    ColumnDescriptor cDescB = new ColumnDescriptor(columnBname, 2, "NONE",
        false, "NONE", 0, 0, false, -1);
    cDescriptors.add(cDescB);

    return cDescriptors;
  }

  /**
   * @param includeA whether or not to include columnA
   * @param includeB whether or not to include columnB
   * @return a List of column names for use in retrieving a scanner
   */
  private List<ByteBuffer> getColumnList(boolean includeA, boolean includeB) {
    List<ByteBuffer> columnList = new ArrayList<ByteBuffer>();
    if (includeA) columnList.add(columnAname);
    if (includeB) columnList.add(columnBname);
    return columnList;
  }

  /**
   * @return a List of Mutations for a row, with columnA having valueA
   * and columnB having valueB
   */
  private static List<Mutation> getMutations() {
    List<Mutation> mutations = new ArrayList<Mutation>();
    mutations.add(new Mutation(false, columnAname, valueAname, true));
    mutations.add(new Mutation(false, columnBname, valueBname, true));
    return mutations;
  }

  /**
   * @return a List of BatchMutations with the following effects:
   * (rowA, columnA): delete
   * (rowA, columnB): place valueC
   * (rowB, columnA): place valueC
   * (rowB, columnB): place valueD
   */
  private static List<BatchMutation> getBatchMutations() {
    List<BatchMutation> batchMutations = new ArrayList<BatchMutation>();

    // Mutations to rowA.  You can't mix delete and put anymore.
    List<Mutation> rowAmutations = new ArrayList<Mutation>();
    rowAmutations.add(new Mutation(true, columnAname, null, true));
    batchMutations.add(new BatchMutation(rowAname, rowAmutations));

    rowAmutations = new ArrayList<Mutation>();
    rowAmutations.add(new Mutation(false, columnBname, valueCname, true));
    batchMutations.add(new BatchMutation(rowAname, rowAmutations));

    // Mutations to rowB
    List<Mutation> rowBmutations = new ArrayList<Mutation>();
    rowBmutations.add(new Mutation(false, columnAname, valueCname, true));
    rowBmutations.add(new Mutation(false, columnBname, valueDname, true));
    batchMutations.add(new BatchMutation(rowBname, rowBmutations));

    return batchMutations;
  }

  /**
   * Fetches any remaining rows from the passed scanner and then closes it.
   *
   * @param scannerId the scanner to close
   * @param handler the HBaseHandler interfacing to HBase
   * @throws Exception
   */
  private void closeScanner(
      int scannerId, ThriftServerRunner.HBaseHandler handler) throws Exception {
    handler.scannerGet(scannerId);
    handler.scannerClose(scannerId);
  }

  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
    new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
}