/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import java.io.File;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

import com.google.protobuf.InvalidProtocolBufferException;
import com.yammer.metrics.core.MetricsRegistry;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.catalog.MetaReader;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.hadoopbackport.JarFinder;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.security.token.TokenUtil;
import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ZKConfig;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.StringUtils;
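
/**
 * Utility for {@link TableMapper} and {@link TableReducer}: helpers that wire an HBase table
 * (or table snapshot) into a MapReduce {@link Job} as its input and/or output.
 *
 * <p>A minimal, illustrative usage sketch; the table name and the <code>MyMapper</code>
 * class are placeholders, not part of this API:
 * <pre>
 * Configuration conf = HBaseConfiguration.create();
 * Job job = Job.getInstance(conf, "example");
 * Scan scan = new Scan();
 * TableMapReduceUtil.initTableMapperJob("mytable", scan, MyMapper.class,
 *     ImmutableBytesWritable.class, Put.class, job);
 * TableMapReduceUtil.initTableReducerJob("mytable", IdentityTableReducer.class, job);
 * </pre>
 */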
@SuppressWarnings({ "rawtypes", "unchecked" })
@InterfaceAudience.Public
@InterfaceStability.Stable
public class TableMapReduceUtil {
  static Log LOG = LogFactory.getLog(TableMapReduceUtil.class);
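
  /**
   * Use this before submitting a TableMap job. It will appropriately set up
   * the job.
   *
   * @param table  The table name to read from.
   * @param scan  The scan instance with the columns, time range etc.
   * @param mapper  The mapper class to use.
   * @param outputKeyClass  The class of the output key.
   * @param outputValueClass  The class of the output value.
   * @param job  The current job to adjust.  Make sure the passed job is
   *          carrying all necessary HBase configuration.
   * @throws IOException When setting up the details fails.
   */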
  public static void initTableMapperJob(String table, Scan scan,
      Class<? extends TableMapper> mapper,
      Class<?> outputKeyClass,
      Class<?> outputValueClass, Job job)
  throws IOException {
    initTableMapperJob(table, scan, mapper, outputKeyClass, outputValueClass,
        job, true);
  }
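
  /**
   * Use this before submitting a TableMap job. It will appropriately set up
   * the job.
   *
   * @param table  Binary representation of the table name to read from.
   * @param scan  The scan instance with the columns, time range etc.
   * @param mapper  The mapper class to use.
   * @param outputKeyClass  The class of the output key.
   * @param outputValueClass  The class of the output value.
   * @param job  The current job to adjust.  Make sure the passed job is
   *          carrying all necessary HBase configuration.
   * @throws IOException When setting up the details fails.
   */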
  public static void initTableMapperJob(byte[] table, Scan scan,
      Class<? extends TableMapper> mapper,
      Class<?> outputKeyClass,
      Class<?> outputValueClass, Job job)
  throws IOException {
    initTableMapperJob(Bytes.toString(table), scan, mapper, outputKeyClass, outputValueClass,
        job, true);
  }
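
  /**
   * Use this before submitting a TableMap job. It will appropriately set up
   * the job.
   *
   * @param table  The table name to read from.
   * @param scan  The scan instance with the columns, time range etc.
   * @param mapper  The mapper class to use.
   * @param outputKeyClass  The class of the output key.
   * @param outputValueClass  The class of the output value.
   * @param job  The current job to adjust.  Make sure the passed job is
   *          carrying all necessary HBase configuration.
   * @param addDependencyJars  upload HBase jars and jars for any of the configured
   *          job classes via the distributed cache (tmpjars).
   * @param inputFormatClass  the input format to use.
   * @throws IOException When setting up the details fails.
   */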
  public static void initTableMapperJob(String table, Scan scan,
      Class<? extends TableMapper> mapper,
      Class<?> outputKeyClass,
      Class<?> outputValueClass, Job job,
      boolean addDependencyJars, Class<? extends InputFormat> inputFormatClass)
  throws IOException {
    initTableMapperJob(table, scan, mapper, outputKeyClass, outputValueClass, job,
        addDependencyJars, true, inputFormatClass);
  }
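
  /**
   * Use this before submitting a TableMap job. It will appropriately set up
   * the job.
   *
   * @param table  The table name to read from.
   * @param scan  The scan instance with the columns, time range etc.
   * @param mapper  The mapper class to use.
   * @param outputKeyClass  The class of the output key.
   * @param outputValueClass  The class of the output value.
   * @param job  The current job to adjust.  Make sure the passed job is
   *          carrying all necessary HBase configuration.
   * @param addDependencyJars  upload HBase jars and jars for any of the configured
   *          job classes via the distributed cache (tmpjars).
   * @param initCredentials  whether to initialize HBase auth credentials for the job.
   * @param inputFormatClass  the input format to use.
   * @throws IOException When setting up the details fails.
   */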
  public static void initTableMapperJob(String table, Scan scan,
      Class<? extends TableMapper> mapper,
      Class<?> outputKeyClass,
      Class<?> outputValueClass, Job job,
      boolean addDependencyJars, boolean initCredentials,
      Class<? extends InputFormat> inputFormatClass)
  throws IOException {
    job.setInputFormatClass(inputFormatClass);
    if (outputValueClass != null) job.setMapOutputValueClass(outputValueClass);
    if (outputKeyClass != null) job.setMapOutputKeyClass(outputKeyClass);
    job.setMapperClass(mapper);
    if (Put.class.equals(outputValueClass)) {
      // Puts emitted for the same row can be combined map-side before the shuffle.
      job.setCombinerClass(PutCombiner.class);
    }
    Configuration conf = job.getConfiguration();
    HBaseConfiguration.merge(conf, HBaseConfiguration.create(conf));
    conf.set(TableInputFormat.INPUT_TABLE, table);
    conf.set(TableInputFormat.SCAN, convertScanToString(scan));
    conf.setStrings("io.serializations", conf.get("io.serializations"),
        MutationSerialization.class.getName(), ResultSerialization.class.getName(),
        KeyValueSerialization.class.getName());
    if (addDependencyJars) {
      addDependencyJars(job);
    }
    if (initCredentials) {
      initCredentials(job);
    }
  }
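
  /**
   * Use this before submitting a TableMap job. It will appropriately set up
   * the job.
   *
   * @param table  Binary representation of the table name to read from.
   * @param scan  The scan instance with the columns, time range etc.
   * @param mapper  The mapper class to use.
   * @param outputKeyClass  The class of the output key.
   * @param outputValueClass  The class of the output value.
   * @param job  The current job to adjust.
   * @param addDependencyJars  upload HBase jars and jars for any of the configured
   *          job classes via the distributed cache (tmpjars).
   * @param inputFormatClass  the input format to use.
   * @throws IOException When setting up the details fails.
   */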
  public static void initTableMapperJob(byte[] table, Scan scan,
      Class<? extends TableMapper> mapper,
      Class<?> outputKeyClass,
      Class<?> outputValueClass, Job job,
      boolean addDependencyJars, Class<? extends InputFormat> inputFormatClass)
  throws IOException {
    initTableMapperJob(Bytes.toString(table), scan, mapper, outputKeyClass,
        outputValueClass, job, addDependencyJars, inputFormatClass);
  }
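
  /**
   * Use this before submitting a TableMap job. It will appropriately set up
   * the job.
   *
   * @param table  Binary representation of the table name to read from.
   * @param scan  The scan instance with the columns, time range etc.
   * @param mapper  The mapper class to use.
   * @param outputKeyClass  The class of the output key.
   * @param outputValueClass  The class of the output value.
   * @param job  The current job to adjust.
   * @param addDependencyJars  upload HBase jars and jars for any of the configured
   *          job classes via the distributed cache (tmpjars).
   * @throws IOException When setting up the details fails.
   */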
  public static void initTableMapperJob(byte[] table, Scan scan,
      Class<? extends TableMapper> mapper,
      Class<?> outputKeyClass,
      Class<?> outputValueClass, Job job,
      boolean addDependencyJars)
  throws IOException {
    initTableMapperJob(Bytes.toString(table), scan, mapper, outputKeyClass,
        outputValueClass, job, addDependencyJars, TableInputFormat.class);
  }
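
  /**
   * Use this before submitting a TableMap job. It will appropriately set up
   * the job.
   *
   * @param table  The table name to read from.
   * @param scan  The scan instance with the columns, time range etc.
   * @param mapper  The mapper class to use.
   * @param outputKeyClass  The class of the output key.
   * @param outputValueClass  The class of the output value.
   * @param job  The current job to adjust.
   * @param addDependencyJars  upload HBase jars and jars for any of the configured
   *          job classes via the distributed cache (tmpjars).
   * @throws IOException When setting up the details fails.
   */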
  public static void initTableMapperJob(String table, Scan scan,
      Class<? extends TableMapper> mapper,
      Class<?> outputKeyClass,
      Class<?> outputValueClass, Job job,
      boolean addDependencyJars)
  throws IOException {
    initTableMapperJob(table, scan, mapper, outputKeyClass,
        outputValueClass, job, addDependencyJars, TableInputFormat.class);
  }
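
  /**
   * Enable a basic on-heap cache for these jobs. Any BlockCache implementation based on
   * direct memory will likely cause the map tasks to OOM when opening the region. This
   * is done here instead of in the record reader in case an advanced user wants to
   * override this behavior in their job.
   */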
  public static void resetCacheConfig(Configuration conf) {
    conf.setFloat(
        HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, HConstants.HFILE_BLOCK_CACHE_SIZE_DEFAULT);
    conf.setFloat("hbase.offheapcache.percentage", 0f);
    conf.setFloat("hbase.bucketcache.size", 0f);
    conf.unset("hbase.bucketcache.ioengine");
  }
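
  /**
   * Sets up the job for reading from one or more table snapshots, with one or more scans
   * per snapshot. It bypasses the HBase servers and reads directly from snapshot files.
   *
   * @param snapshotScans  map of snapshot name to the scans to run against that snapshot.
   * @param mapper  The mapper class to use.
   * @param outputKeyClass  The class of the output key.
   * @param outputValueClass  The class of the output value.
   * @param job  The current job to adjust.  Make sure the passed job is
   *          carrying all necessary HBase configuration.
   * @param addDependencyJars  upload HBase jars and jars for any of the configured
   *          job classes via the distributed cache (tmpjars).
   * @param tmpRestoreDir  a temporary directory into which the snapshots are restored;
   *          it must be writable by the current user.
   * @throws IOException When setting up the details fails.
   */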
  public static void initMultiTableSnapshotMapperJob(Map<String, Collection<Scan>> snapshotScans,
      Class<? extends TableMapper> mapper, Class<?> outputKeyClass, Class<?> outputValueClass,
      Job job, boolean addDependencyJars, Path tmpRestoreDir) throws IOException {
    MultiTableSnapshotInputFormat.setInput(job.getConfiguration(), snapshotScans, tmpRestoreDir);

    job.setInputFormatClass(MultiTableSnapshotInputFormat.class);
    if (outputValueClass != null) {
      job.setMapOutputValueClass(outputValueClass);
    }
    if (outputKeyClass != null) {
      job.setMapOutputKeyClass(outputKeyClass);
    }
    job.setMapperClass(mapper);
    Configuration conf = job.getConfiguration();
    HBaseConfiguration.merge(conf, HBaseConfiguration.create(conf));

    if (addDependencyJars) {
      addDependencyJars(job);
      addDependencyJars(job.getConfiguration(), MetricsRegistry.class);
    }

    resetCacheConfig(job.getConfiguration());
  }
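
  /**
   * Sets up the job for reading from a table snapshot. It bypasses the HBase servers
   * and reads directly from snapshot files.
   *
   * @param snapshotName  The name of the snapshot (of a table) to read from.
   * @param scan  The scan instance with the columns, time range etc.
   * @param mapper  The mapper class to use.
   * @param outputKeyClass  The class of the output key.
   * @param outputValueClass  The class of the output value.
   * @param job  The current job to adjust.  Make sure the passed job is
   *          carrying all necessary HBase configuration.
   * @param addDependencyJars  upload HBase jars and jars for any of the configured
   *          job classes via the distributed cache (tmpjars).
   * @param tmpRestoreDir  a temporary directory into which the snapshot is restored; the
   *          current user should have write permission to it, and it can be deleted after
   *          the job finishes.
   * @throws IOException When setting up the details fails.
   * @see TableSnapshotInputFormat
   */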
  public static void initTableSnapshotMapperJob(String snapshotName, Scan scan,
      Class<? extends TableMapper> mapper,
      Class<?> outputKeyClass,
      Class<?> outputValueClass, Job job,
      boolean addDependencyJars, Path tmpRestoreDir)
  throws IOException {
    TableSnapshotInputFormat.setInput(job, snapshotName, tmpRestoreDir);
    initTableMapperJob(snapshotName, scan, mapper, outputKeyClass,
        outputValueClass, job, addDependencyJars, false, TableSnapshotInputFormat.class);
    resetCacheConfig(job.getConfiguration());
  }
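
  /**
   * Use this before submitting a multi-table TableMap job. It will appropriately
   * set up the job.
   *
   * @param scans  The list of {@link Scan} objects to read from.
   * @param mapper  The mapper class to use.
   * @param outputKeyClass  The class of the output key.
   * @param outputValueClass  The class of the output value.
   * @param job  The current job to adjust.  Make sure the passed job is
   *          carrying all necessary HBase configuration.
   * @throws IOException When setting up the details fails.
   */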
  public static void initTableMapperJob(List<Scan> scans,
      Class<? extends TableMapper> mapper,
      Class<? extends WritableComparable> outputKeyClass,
      Class<? extends Writable> outputValueClass, Job job) throws IOException {
    initTableMapperJob(scans, mapper, outputKeyClass, outputValueClass, job,
        true);
  }
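
  /**
   * Use this before submitting a multi-table TableMap job. It will appropriately
   * set up the job.
   *
   * @param scans  The list of {@link Scan} objects to read from.
   * @param mapper  The mapper class to use.
   * @param outputKeyClass  The class of the output key.
   * @param outputValueClass  The class of the output value.
   * @param job  The current job to adjust.  Make sure the passed job is
   *          carrying all necessary HBase configuration.
   * @param addDependencyJars  upload HBase jars and jars for any of the configured
   *          job classes via the distributed cache (tmpjars).
   * @throws IOException When setting up the details fails.
   */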
  public static void initTableMapperJob(List<Scan> scans,
      Class<? extends TableMapper> mapper,
      Class<? extends WritableComparable> outputKeyClass,
      Class<? extends Writable> outputValueClass, Job job,
      boolean addDependencyJars) throws IOException {
    initTableMapperJob(scans, mapper, outputKeyClass, outputValueClass, job,
        addDependencyJars, true);
  }
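
  /**
   * Use this before submitting a multi-table TableMap job. It will appropriately
   * set up the job.
   *
   * @param scans  The list of {@link Scan} objects to read from.
   * @param mapper  The mapper class to use.
   * @param outputKeyClass  The class of the output key.
   * @param outputValueClass  The class of the output value.
   * @param job  The current job to adjust.  Make sure the passed job is
   *          carrying all necessary HBase configuration.
   * @param addDependencyJars  upload HBase jars and jars for any of the configured
   *          job classes via the distributed cache (tmpjars).
   * @param initCredentials  whether to initialize HBase auth credentials for the job.
   * @throws IOException When setting up the details fails.
   */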
  public static void initTableMapperJob(List<Scan> scans,
      Class<? extends TableMapper> mapper,
      Class<? extends WritableComparable> outputKeyClass,
      Class<? extends Writable> outputValueClass, Job job,
      boolean addDependencyJars,
      boolean initCredentials) throws IOException {
    job.setInputFormatClass(MultiTableInputFormat.class);
    if (outputValueClass != null) {
      job.setMapOutputValueClass(outputValueClass);
    }
    if (outputKeyClass != null) {
      job.setMapOutputKeyClass(outputKeyClass);
    }
    job.setMapperClass(mapper);
    Configuration conf = job.getConfiguration();
    HBaseConfiguration.merge(conf, HBaseConfiguration.create(conf));
    List<String> scanStrings = new ArrayList<String>();

    for (Scan scan : scans) {
      scanStrings.add(convertScanToString(scan));
    }
    job.getConfiguration().setStrings(MultiTableInputFormat.SCANS,
        scanStrings.toArray(new String[scanStrings.size()]));

    if (addDependencyJars) {
      addDependencyJars(job);
    }

    if (initCredentials) {
      initCredentials(job);
    }
  }
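
  /**
   * Obtains authentication tokens on behalf of the current user for the cluster(s) the job
   * will talk to, including a remote output cluster configured via
   * {@link TableOutputFormat#QUORUM_ADDRESS}, and adds them to the job's credentials.
   *
   * @param job The job that requires the permission.
   * @throws IOException When the authentication token cannot be obtained.
   */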
  public static void initCredentials(Job job) throws IOException {
    UserProvider userProvider = UserProvider.instantiate(job.getConfiguration());
    if (userProvider.isHadoopSecurityEnabled()) {
      // Propagate delegation-token related props from the launcher job to the MR job.
      if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {
        job.getConfiguration().set("mapreduce.job.credentials.binary",
            System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
      }
    }

    if (userProvider.isHBaseSecurityEnabled()) {
      try {
        // Obtain a token for the remote cluster first, if output goes to another cluster.
        String quorumAddress = job.getConfiguration().get(TableOutputFormat.QUORUM_ADDRESS);
        User user = userProvider.getCurrent();
        if (quorumAddress != null) {
          Configuration peerConf = HBaseConfiguration.createClusterConf(job.getConfiguration(),
              quorumAddress, TableOutputFormat.OUTPUT_CONF_PREFIX);
          HConnection peerConn = HConnectionManager.createConnection(peerConf);
          try {
            TokenUtil.addTokenForJob(peerConn, user, job);
          } finally {
            peerConn.close();
          }
        }

        // Then obtain a token for the cluster the job itself reads from.
        HConnection conn = HConnectionManager.createConnection(job.getConfiguration());
        try {
          TokenUtil.addTokenForJob(conn, user, job);
        } finally {
          conn.close();
        }
      } catch (InterruptedException ie) {
        LOG.info("Interrupted obtaining user authentication token");
        Thread.currentThread().interrupt();
      }
    }
  }
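
  /**
   * Obtain an authentication token, for the specified cluster, on behalf of the current user
   * and add it to the credentials for the given map reduce job.
   *
   * @param job The job that requires the permission.
   * @param quorumAddress the ZooKeeper cluster key of the remote cluster.
   * @throws IOException When the authentication token cannot be obtained.
   * @deprecated Use {@link #initCredentialsForCluster(Job, Configuration)} instead.
   */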
  @Deprecated
  public static void initCredentialsForCluster(Job job, String quorumAddress)
      throws IOException {
    Configuration peerConf = HBaseConfiguration.createClusterConf(job.getConfiguration(),
        quorumAddress);
    initCredentialsForCluster(job, peerConf);
  }
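
  /**
   * Obtain an authentication token, for the specified cluster, on behalf of the current user
   * and add it to the credentials for the given map reduce job.
   *
   * @param job The job that requires the permission.
   * @param conf The configuration to use in connecting to the peer cluster.
   * @throws IOException When the authentication token cannot be obtained.
   */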
  public static void initCredentialsForCluster(Job job, Configuration conf)
      throws IOException {
    UserProvider userProvider = UserProvider.instantiate(job.getConfiguration());
    if (userProvider.isHBaseSecurityEnabled()) {
      try {
        HConnection peerConn = HConnectionManager.createConnection(conf);
        try {
          TokenUtil.addTokenForJob(peerConn, userProvider.getCurrent(), job);
        } finally {
          peerConn.close();
        }
      } catch (InterruptedException e) {
        LOG.info("Interrupted obtaining user authentication token");
        // Restore the interrupt status rather than clearing it.
        Thread.currentThread().interrupt();
      }
    }
  }
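
  /**
   * Writes the given scan into a Base64 encoded string.
   *
   * @param scan  The scan to write out.
   * @return The scan saved in a Base64 encoded string.
   * @throws IOException When writing the scan fails.
   */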
  static String convertScanToString(Scan scan) throws IOException {
    ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
    return Base64.encodeBytes(proto.toByteArray());
  }
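
  /**
   * Converts the given Base64 string back into a Scan instance.
   *
   * @param base64  The scan details.
   * @return The newly created Scan instance.
   * @throws IOException When reading the scan instance fails.
   */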
  static Scan convertStringToScan(String base64) throws IOException {
    byte [] decoded = Base64.decode(base64);
    ClientProtos.Scan scan;
    try {
      scan = ClientProtos.Scan.parseFrom(decoded);
    } catch (InvalidProtocolBufferException ipbe) {
      throw new IOException(ipbe);
    }

    return ProtobufUtil.toScan(scan);
  }
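
  /**
   * Use this before submitting a TableReduce job. It will
   * appropriately set up the job.
   *
   * @param table  The output table.
   * @param reducer  The reducer class to use.
   * @param job  The current job to adjust.
   * @throws IOException When determining the region count fails.
   */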
  public static void initTableReducerJob(String table,
      Class<? extends TableReducer> reducer, Job job)
  throws IOException {
    initTableReducerJob(table, reducer, job, null);
  }
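
  /**
   * Use this before submitting a TableReduce job. It will
   * appropriately set up the job.
   *
   * @param table  The output table.
   * @param reducer  The reducer class to use.
   * @param job  The current job to adjust.
   * @param partitioner  Partitioner to use. Pass <code>null</code> to use
   *          default partitioner.
   * @throws IOException When determining the region count fails.
   */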
  public static void initTableReducerJob(String table,
      Class<? extends TableReducer> reducer, Job job,
      Class partitioner) throws IOException {
    initTableReducerJob(table, reducer, job, partitioner, null, null, null);
  }
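
  /**
   * Use this before submitting a TableReduce job. It will
   * appropriately set up the job.
   *
   * @param table  The output table.
   * @param reducer  The reducer class to use.
   * @param job  The current job to adjust.  Make sure the passed job is
   *          carrying all necessary HBase configuration.
   * @param partitioner  Partitioner to use. Pass <code>null</code> to use
   *          default partitioner.
   * @param quorumAddress Distant cluster to write to; default is null for
   *          output to the cluster that is designated in <code>hbase-site.xml</code>.
   *          Set this String to the ZooKeeper cluster key of an alternate remote cluster
   *          when you would have the reduce write to a cluster other than the default;
   *          e.g. copying tables between clusters. The format is
   *          <code>&lt;hbase.zookeeper.quorum&gt;:&lt;hbase.zookeeper.client.port&gt;:&lt;zookeeper.znode.parent&gt;</code>,
   *          such as <code>server,server2,server3:2181:/hbase</code>.
   * @param serverClass redefined hbase.regionserver.class.
   * @param serverImpl redefined hbase.regionserver.impl.
   * @throws IOException When determining the region count fails.
   */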
  public static void initTableReducerJob(String table,
      Class<? extends TableReducer> reducer, Job job,
      Class partitioner, String quorumAddress, String serverClass,
      String serverImpl) throws IOException {
    initTableReducerJob(table, reducer, job, partitioner, quorumAddress,
        serverClass, serverImpl, true);
  }
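
  /**
   * Use this before submitting a TableReduce job. It will
   * appropriately set up the job.
   *
   * @param table  The output table.
   * @param reducer  The reducer class to use.
   * @param job  The current job to adjust.  Make sure the passed job is
   *          carrying all necessary HBase configuration.
   * @param partitioner  Partitioner to use. Pass <code>null</code> to use
   *          default partitioner.
   * @param quorumAddress Distant cluster to write to; default is null for
   *          output to the cluster that is designated in <code>hbase-site.xml</code>.
   *          Pass the ZooKeeper cluster key of the remote cluster, e.g.
   *          <code>server,server2,server3:2181:/hbase</code>.
   * @param serverClass redefined hbase.regionserver.class.
   * @param serverImpl redefined hbase.regionserver.impl.
   * @param addDependencyJars  upload HBase jars and jars for any of the configured
   *          job classes via the distributed cache (tmpjars).
   * @throws IOException When determining the region count fails.
   */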
  public static void initTableReducerJob(String table,
      Class<? extends TableReducer> reducer, Job job,
      Class partitioner, String quorumAddress, String serverClass,
      String serverImpl, boolean addDependencyJars) throws IOException {

    Configuration conf = job.getConfiguration();
    HBaseConfiguration.merge(conf, HBaseConfiguration.create(conf));
    job.setOutputFormatClass(TableOutputFormat.class);
    if (reducer != null) job.setReducerClass(reducer);
    conf.set(TableOutputFormat.OUTPUT_TABLE, table);
    conf.setStrings("io.serializations", conf.get("io.serializations"),
        MutationSerialization.class.getName(), ResultSerialization.class.getName());
    // If passed a quorum/ensemble address, pass it on to TableOutputFormat.
    if (quorumAddress != null) {
      // Calling this will validate the format.
      ZKConfig.validateClusterKey(quorumAddress);
      conf.set(TableOutputFormat.QUORUM_ADDRESS, quorumAddress);
    }
    if (serverClass != null && serverImpl != null) {
      conf.set(TableOutputFormat.REGION_SERVER_CLASS, serverClass);
      conf.set(TableOutputFormat.REGION_SERVER_IMPL, serverImpl);
    }
    job.setOutputKeyClass(ImmutableBytesWritable.class);
    job.setOutputValueClass(Writable.class);
    if (partitioner == HRegionPartitioner.class) {
      job.setPartitionerClass(HRegionPartitioner.class);
      // With HRegionPartitioner, any reducers beyond the region count would sit idle.
      int regions = MetaReader.getRegionCount(conf, table);
      if (job.getNumReduceTasks() > regions) {
        job.setNumReduceTasks(regions);
      }
    } else if (partitioner != null) {
      job.setPartitionerClass(partitioner);
    }

    if (addDependencyJars) {
      addDependencyJars(job);
    }

    initCredentials(job);
  }
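
  /**
   * Ensures that the given number of reduce tasks for the given job
   * configuration does not exceed the number of regions for the given table.
   *
   * @param table  The table to get the region count for.
   * @param job  The current job to adjust.
   * @throws IOException When retrieving the table details fails.
   */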
  public static void limitNumReduceTasks(String table, Job job)
  throws IOException {
    int regions = MetaReader.getRegionCount(job.getConfiguration(), table);
    if (job.getNumReduceTasks() > regions)
      job.setNumReduceTasks(regions);
  }
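
  /**
   * Sets the number of reduce tasks for the given job configuration to the
   * number of regions the given table has.
   *
   * @param table  The table to get the region count for.
   * @param job  The current job to adjust.
   * @throws IOException When retrieving the table details fails.
   */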
  public static void setNumReduceTasks(String table, Job job)
  throws IOException {
    job.setNumReduceTasks(MetaReader.getRegionCount(job.getConfiguration(), table));
  }
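
  /**
   * Sets the number of rows to return and cache with each scanner iteration.
   * Higher caching values will enable faster mapreduce jobs at the expense of
   * requiring more heap to contain the cached rows.
   *
   * @param job The current job to adjust.
   * @param batchSize The number of rows to return in batch with each scanner
   *          iteration.
   */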
  public static void setScannerCaching(Job job, int batchSize) {
    job.getConfiguration().setInt("hbase.client.scanner.caching", batchSize);
  }
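
  /**
   * Add HBase and its dependencies (only) to the job configuration.
   * <p>
   * This is intended as a low-level API, facilitating code reuse between this
   * class and its mapred counterpart. It is also of use to external tools that
   * need to build a MapReduce job that interacts with HBase but want
   * fine-grained control over the jars shipped to the cluster.
   * </p>
   * @param conf The Configuration object to extend with dependencies.
   * @see org.apache.hadoop.hbase.mapred.TableMapReduceUtil
   */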
  public static void addHBaseDependencyJars(Configuration conf) throws IOException {

    // PrefixTreeCodec is part of the hbase-prefix-tree module. If it is not shipped with the
    // job, MR jobs that write prefix-tree encoded hfiles will fail. Reflection is used here
    // so that this class does not take a compile-time dependency on that module.
    Class prefixTreeCodecClass = null;
    try {
      prefixTreeCodecClass =
          Class.forName("org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec");
    } catch (ClassNotFoundException e) {
      // Not on the classpath; the job simply will not ship the prefix-tree codec jar.
      LOG.warn("The hbase-prefix-tree module jar containing PrefixTreeCodec is not present." +
          " Continuing without it.");
    }

    addDependencyJars(conf,
        // explicitly pull in the HBase jars
        org.apache.hadoop.hbase.HConstants.class,
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.class,
        org.apache.hadoop.hbase.client.Put.class,
        org.apache.hadoop.hbase.CompatibilityFactory.class,
        org.apache.hadoop.hbase.mapreduce.TableMapper.class,
        prefixTreeCodecClass,
        // pull in the jars of HBase's transitive dependencies
        org.apache.zookeeper.ZooKeeper.class,
        org.jboss.netty.channel.ChannelFactory.class,
        com.google.protobuf.Message.class,
        com.google.common.collect.Lists.class,
        org.cloudera.htrace.Trace.class,
        org.cliffc.high_scale_lib.Counter.class,
        com.yammer.metrics.core.MetricsRegistry.class);
  }
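
  /**
   * Returns a classpath string built from the content of the "tmpjars" value in {@code conf}.
   *
   * @param conf The Configuration to read "tmpjars" from; must not be null.
   * @return a path-separator delimited classpath of the tmpjars entries.
   */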
  public static String buildDependencyClasspath(Configuration conf) {
    if (conf == null) {
      throw new IllegalArgumentException("Must provide a configuration object.");
    }
    Set<String> paths = new HashSet<String>(conf.getStringCollection("tmpjars"));
    if (paths.size() == 0) {
      throw new IllegalArgumentException("Configuration contains no tmpjars.");
    }
    StringBuilder sb = new StringBuilder();
    for (String s : paths) {
      // entries take the form of URIs such as 'file:/path/to/file.jar'; strip the scheme.
      int idx = s.indexOf(":");
      if (idx != -1) s = s.substring(idx + 1);
      if (sb.length() > 0) sb.append(File.pathSeparator);
      sb.append(s);
    }
    return sb.toString();
  }
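
  /**
   * Add the HBase dependency jars as well as jars for any of the configured
   * job classes to the job configuration, so that JobClient will ship them
   * to the cluster and add them to the DistributedCache.
   *
   * @param job The job to adjust.
   * @throws IOException When adding the jars fails.
   */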
  public static void addDependencyJars(Job job) throws IOException {
    addHBaseDependencyJars(job.getConfiguration());
    try {
      addDependencyJars(job.getConfiguration(),
          // when making changes here, consider also mapred.TableMapReduceUtil
          // pull the job classes
          job.getMapOutputKeyClass(),
          job.getMapOutputValueClass(),
          job.getInputFormatClass(),
          job.getOutputKeyClass(),
          job.getOutputValueClass(),
          job.getOutputFormatClass(),
          job.getPartitionerClass(),
          job.getCombinerClass());
    } catch (ClassNotFoundException e) {
      throw new IOException(e);
    }
  }
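
  /**
   * Add the jars containing the given classes to the job's configuration
   * such that JobClient will ship them to the cluster and add them to
   * the DistributedCache.
   *
   * @param conf The job configuration to adjust.
   * @param classes The classes whose containing jars should be shipped.
   * @throws IOException When locating or creating the jars fails.
   */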
  public static void addDependencyJars(Configuration conf,
      Class<?>... classes) throws IOException {

    FileSystem localFs = FileSystem.getLocal(conf);
    Set<String> jars = new HashSet<String>();
    // Add jars that are already in the tmpjars variable.
    jars.addAll(conf.getStringCollection("tmpjars"));

    // Add jars as we find them to a map of class name to jar, so that we can avoid
    // creating new jars for classes that have already been packaged.
    Map<String, String> packagedClasses = new HashMap<String, String>();

    // Add jars containing the specified classes.
    for (Class<?> clazz : classes) {
      if (clazz == null) continue;

      Path path = findOrCreateJar(clazz, localFs, packagedClasses);
      if (path == null) {
        LOG.warn("Could not find jar for class " + clazz +
            " in order to ship it to the cluster.");
        continue;
      }
      if (!localFs.exists(path)) {
        LOG.warn("Could not validate jar file " + path + " for class "
            + clazz);
        continue;
      }
      jars.add(path.toString());
    }
    if (jars.isEmpty()) return;

    conf.set("tmpjars", StringUtils.arrayToString(jars.toArray(new String[jars.size()])));
  }
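
  /**
   * Finds the Jar for a class or creates it if it doesn't exist. If the class is in a
   * directory in the classpath, it creates a Jar on the fly with the contents of the
   * directory and returns the path to that Jar. If a Jar is created, it is created in
   * the system temporary directory. Otherwise, returns an existing jar that contains
   * a class of the same name.
   *
   * @param my_class the class to find.
   * @param fs the FileSystem with which to qualify the returned path.
   * @param packagedClasses a map of class name to containing jar, used as a cache.
   * @return a jar file that contains the class, or null if none could be found or created.
   * @throws IOException When reading the jar file fails.
   */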
  private static Path findOrCreateJar(Class<?> my_class, FileSystem fs,
      Map<String, String> packagedClasses)
  throws IOException {
    // Attempt to locate an existing jar for the class.
    String jar = findContainingJar(my_class, packagedClasses);
    if (null == jar || jar.isEmpty()) {
      jar = getJar(my_class);
      updateMap(jar, packagedClasses);
    }

    if (null == jar || jar.isEmpty()) {
      return null;
    }

    LOG.debug(String.format("For class %s, using jar %s", my_class.getName(), jar));
    return new Path(jar).makeQualified(fs);
  }
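
  /**
   * Add entries to <code>packagedClasses</code> corresponding to class files
   * contained in <code>jar</code>.
   *
   * @param jar The jar whose content to list.
   * @param packagedClasses map of class name to containing jar.
   * @throws IOException When reading the jar file fails.
   */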
  private static void updateMap(String jar, Map<String, String> packagedClasses)
      throws IOException {
    if (null == jar || jar.isEmpty()) {
      return;
    }
    ZipFile zip = null;
    try {
      zip = new ZipFile(jar);
      for (Enumeration<? extends ZipEntry> iter = zip.entries(); iter.hasMoreElements();) {
        ZipEntry entry = iter.nextElement();
        if (entry.getName().endsWith("class")) {
          packagedClasses.put(entry.getName(), jar);
        }
      }
    } finally {
      if (null != zip) zip.close();
    }
  }
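
  /**
   * Find a jar that contains a class of the same name, if any. It will return
   * a jar file, even if that is not the first thing on the class path that
   * has a class with the same name. Looks first on the classpath and then in
   * the <code>packagedClasses</code> map.
   *
   * @param my_class the class to find.
   * @param packagedClasses map of class name to containing jar.
   * @return a jar file that contains the class, or null.
   * @throws IOException When reading classpath resources fails.
   */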
  private static String findContainingJar(Class<?> my_class, Map<String, String> packagedClasses)
      throws IOException {
    ClassLoader loader = my_class.getClassLoader();

    String class_file = my_class.getName().replaceAll("\\.", "/") + ".class";

    if (loader != null) {
      // First search the classpath.
      for (Enumeration<URL> itr = loader.getResources(class_file); itr.hasMoreElements();) {
        URL url = itr.nextElement();
        if ("jar".equals(url.getProtocol())) {
          String toReturn = url.getPath();
          if (toReturn.startsWith("file:")) {
            toReturn = toReturn.substring("file:".length());
          }
          // URLDecoder is a misnamed class: it actually decodes the
          // x-www-form-urlencoded MIME type rather than real URL encoding
          // (which the file path has). It would therefore decode '+' to ' ',
          // which is incorrect (spaces are either unencoded or encoded as
          // "%20"). Escape the '+' characters first so they survive decoding.
          toReturn = toReturn.replaceAll("\\+", "%2B");
          toReturn = URLDecoder.decode(toReturn, "UTF-8");
          return toReturn.replaceAll("!.*$", "");
        }
      }
    }

    // Now look in any jars we've packaged using JarFinder. Returns null when
    // no jar is found.
    return packagedClasses.get(class_file);
  }
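
  /**
   * Invoke 'getJar' on Hadoop's own JarFinder if it is available (looked up reflectively, so
   * this code also works against Hadoop versions that do not ship it); otherwise fall back to
   * the backported {@link JarFinder}.
   *
   * @param my_class the class to find.
   * @return a jar file that contains the class, or null.
   */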
  private static String getJar(Class<?> my_class) {
    String ret = null;
    String hadoopJarFinder = "org.apache.hadoop.util.JarFinder";
    Class<?> jarFinder = null;
    try {
      LOG.debug("Looking for " + hadoopJarFinder + ".");
      jarFinder = Class.forName(hadoopJarFinder);
      LOG.debug(hadoopJarFinder + " found.");
      Method getJar = jarFinder.getMethod("getJar", Class.class);
      ret = (String) getJar.invoke(null, my_class);
    } catch (ClassNotFoundException e) {
      LOG.debug("Using backported JarFinder.");
      ret = JarFinder.getJar(my_class);
    } catch (InvocationTargetException e) {
      // getJar was properly invoked but threw its own exception; unwrap and rethrow the cause.
      throw new RuntimeException(e.getCause());
    } catch (Exception e) {
      // Toss all other exceptions, which are related to reflection failures.
      throw new RuntimeException("getJar invocation failed.", e);
    }

    return ret;
  }
}