/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver.wal;

import static org.junit.Assert.assertFalse;

import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;

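/**
 * Test many concurrent appenders to an HLog while rolling the log.
 */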
@Category(MediumTests.class)
public class TestLogRollingNoCluster {
  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private final static byte[] EMPTY_1K_ARRAY = new byte[1024];
  private static final int THREAD_COUNT = 100; // Number of concurrent appender threads.
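  /**
   * Spin up many threads and have them all append to the same WAL while the
   * writer is rolled every few edits; any exception raised in an appender
   * fails the test.
   * @throws IOException
   * @throws InterruptedException
   */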
  @Test
  public void testContendedLogRolling() throws IOException, InterruptedException {
    FileSystem fs = FileSystem.get(TEST_UTIL.getConfiguration());
    Path dir = TEST_UTIL.getDataTestDir();
    HLog wal = new HLog(fs, new Path(dir, "logs"), new Path(dir, "oldlogs"),
      TEST_UTIL.getConfiguration());

    final int count = THREAD_COUNT;
    final Appender[] appenders = new Appender[count];
    try {
      for (int i = 0; i < count; i++) {
        // Have each thread append count entries.
        appenders[i] = new Appender(wal, i, count);
      }
      for (int i = 0; i < count; i++) {
        appenders[i].start();
      }
      for (int i = 0; i < count; i++) {
        // Make sure all threads are done before closing the wal.
        appenders[i].join();
      }
    } finally {
      wal.close();
    }
    for (int i = 0; i < count; i++) {
      assertFalse(appenders[i].isException());
    }
  }
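  /**
   * Appender thread. Appends edits to the passed WAL.
   */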
  static class Appender extends Thread {
    private final Log log;
    private final HLog wal;
    private final int count;
    private Exception e = null;

    Appender(final HLog wal, final int index, final int count) {
      super("" + index);
      this.wal = wal;
      this.count = count;
      this.log = LogFactory.getLog("Appender:" + getName());
    }

    /** @return True if the thread finished and an exception was caught. */
    boolean isException() {
      return !isAlive() && this.e != null;
    }

    Exception getException() {
      return this.e;
    }

    @Override
    public void run() {
      this.log.info(getName() + " started");
      try {
        for (int i = 0; i < this.count; i++) {
          long now = System.currentTimeMillis();
          // Roll every ten edits if the log has anything in it.
          if (i % 10 == 0 && this.wal.getNumEntries() > 0) {
            this.wal.rollWriter();
          }
          WALEdit edit = new WALEdit();
          byte[] bytes = Bytes.toBytes(i);
          edit.add(new KeyValue(bytes, bytes, bytes, now, EMPTY_1K_ARRAY));
          this.wal.append(HRegionInfo.FIRST_META_REGIONINFO,
            HTableDescriptor.META_TABLEDESC.getName(),
            edit, now, HTableDescriptor.META_TABLEDESC);
        }
        String msg = getName() + " finished";
        if (isException()) {
          this.log.info(msg, getException());
        } else {
          this.log.info(msg);
        }
      } catch (Exception e) {
        this.e = e;
        log.info("Caught exception from Appender:" + getName(), e);
      }
    }
  }

  // Check that the test does not leak threads or other resources.
  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
    new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
}