The following document contains the results of RAT (Release Audit Tool).
***************************************************** Summary ------- Generated at: 2014-05-23T22:00:37-07:00 Notes: 5 Binaries: 40 Archives: 0 Standards: 1587 Apache Licensed: 1500 Generated Documents: 0 JavaDocs are generated and so license header is optional Generated files do not require license headers 87 Unknown Licenses ******************************* Unapproved licenses: src/packages/deb/hbase.control/conffile src/main/avro/hbase.avpr src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnIncrement.java src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumn.java src/main/java/org/apache/hadoop/hbase/thrift2/generated/TTimeRange.java src/main/java/org/apache/hadoop/hbase/thrift2/generated/TMutation.java src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIOError.java src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java src/main/java/org/apache/hadoop/hbase/thrift2/generated/TPut.java src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDurability.java src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java src/main/java/org/apache/hadoop/hbase/thrift2/generated/TRowMutations.java src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIllegalArgument.java src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIncrement.java src/main/java/org/apache/hadoop/hbase/thrift2/generated/TResult.java src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDelete.java src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDeleteType.java src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnValue.java src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/StorageClusterStatusMessage.java src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellMessage.java src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/VersionMessage.java 
src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ColumnSchemaMessage.java src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableListMessage.java src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableInfoMessage.java src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableSchemaMessage.java src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellSetMessage.java src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java src/main/java/org/apache/hadoop/hbase/protobuf/generated/ErrorHandlingProtos.java src/main/java/org/apache/hadoop/hbase/thrift/generated/TColumn.java src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java src/main/java/org/apache/hadoop/hbase/thrift/generated/TScan.java src/main/java/org/apache/hadoop/hbase/thrift/generated/ColumnDescriptor.java src/main/java/org/apache/hadoop/hbase/thrift/generated/TCell.java src/main/java/org/apache/hadoop/hbase/thrift/generated/AlreadyExists.java src/main/java/org/apache/hadoop/hbase/thrift/generated/TRegionInfo.java src/main/java/org/apache/hadoop/hbase/thrift/generated/IllegalArgument.java src/main/java/org/apache/hadoop/hbase/thrift/generated/TRowResult.java src/main/java/org/apache/hadoop/hbase/thrift/generated/TIncrement.java src/main/java/org/apache/hadoop/hbase/thrift/generated/BatchMutation.java src/main/java/org/apache/hadoop/hbase/thrift/generated/IOError.java src/main/java/org/apache/hadoop/hbase/thrift/generated/Mutation.java src/site/resources/images/hbase_logo.svg src/site/resources/images/big_h_logo.svg src/site/resources/css/freebsd_docbook.css .arcconfig .git/FETCH_HEAD .git/description .git/info/exclude .git/gitk.cache .git/COMMIT_EDITMSG .git/ORIG_HEAD .git/hooks/pre-push.sample .git/hooks/pre-commit.sample .git/hooks/update.sample .git/hooks/applypatch-msg.sample .git/hooks/pre-rebase.sample .git/hooks/commit-msg.sample 
.git/hooks/post-update.sample .git/hooks/prepare-commit-msg.sample .git/hooks/pre-applypatch.sample .git/refs/tags/0.98.3RC0 .git/refs/tags/0.94.20RC0 .git/refs/heads/0.94 .git/refs/remotes/origin/master .git/refs/remotes/origin/0.96 .git/refs/remotes/origin/0.94 .git/refs/remotes/origin/hbase-10070 .git/refs/remotes/origin/HEAD .git/refs/remotes/origin/0.98 .git/logs/refs/heads/0.94 .git/logs/refs/remotes/origin/master .git/logs/refs/remotes/origin/0.96 .git/logs/refs/remotes/origin/0.94 .git/logs/refs/remotes/origin/hbase-10070 .git/logs/refs/remotes/origin/HEAD .git/logs/refs/remotes/origin/0.98 .git/logs/HEAD .git/HEAD .git/packed-refs .git/config .gitignore CHANGES.txt conf/regionservers conf/log4j.properties conf/hadoop-metrics.properties ******************************* Archives: ***************************************************** Files with Apache License headers will be marked AL Binary files (which do not require AL headers) will be marked B Compressed archives will be marked A Notices, licenses etc will be marked N N README.txt N NOTICE.txt AL src/test/resources/mapred-queues.xml AL src/test/resources/log4j.properties B src/test/resources/org/apache/hadoop/hbase/io/hfile/8e8ab58dcf39412da19833fcd8f687ac AL src/test/resources/org/apache/hadoop/hbase/PerformanceEvaluation_Counter.properties AL src/test/resources/hbase-site.xml B src/test/data/hbase-4388-root.dir.tgz AL src/test/ruby/shell/commands_test.rb AL src/test/ruby/shell/formatter_test.rb AL src/test/ruby/shell/shell_test.rb AL src/test/ruby/hbase/table_test.rb AL src/test/ruby/hbase/hbase_test.rb AL src/test/ruby/hbase/admin_test.rb AL src/test/ruby/test_helper.rb AL src/test/ruby/tests_runner.rb AL src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java AL src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java AL src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java AL src/test/java/org/apache/hadoop/hbase/IntegrationTestingUtility.java AL 
src/test/java/org/apache/hadoop/hbase/TestHDFSBlocksDistribution.java AL src/test/java/org/apache/hadoop/hbase/TestCheckTestClasses.java AL src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionSerialization.java AL src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java AL src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java AL src/test/java/org/apache/hadoop/hbase/migration/TestMigrationFrom090To092.java AL src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java AL src/test/java/org/apache/hadoop/hbase/TestKeyValue.java AL src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java AL src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java AL src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java AL src/test/java/org/apache/hadoop/hbase/LargeTests.java AL src/test/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java AL src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java AL src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java AL src/test/java/org/apache/hadoop/hbase/TestRegionRebalancing.java AL src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java AL src/test/java/org/apache/hadoop/hbase/TestCompare.java AL src/test/java/org/apache/hadoop/hbase/HBaseClusterManager.java AL src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java AL src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java AL src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java AL src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java AL src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java AL src/test/java/org/apache/hadoop/hbase/security/TestUser.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java AL 
src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/GenericProtocol.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/TestBigDecimalColumnInterpreter.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationProtocol.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/GenericEndpoint.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java AL src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java AL 
src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java AL src/test/java/org/apache/hadoop/hbase/TestLocalHBaseCluster.java AL src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java AL src/test/java/org/apache/hadoop/hbase/master/TestMasterFileSystem.java AL src/test/java/org/apache/hadoop/hbase/master/TestMXBean.java AL src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java AL src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java AL src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java AL src/test/java/org/apache/hadoop/hbase/master/TestDeadServer.java AL src/test/java/org/apache/hadoop/hbase/master/TestOpenedRegionHandler.java AL src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java AL src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotManager.java AL src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotLogCleaner.java AL src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java AL src/test/java/org/apache/hadoop/hbase/master/handler/TestCreateTableHandler.java AL src/test/java/org/apache/hadoop/hbase/master/handler/TestTableDescriptorModification.java AL src/test/java/org/apache/hadoop/hbase/master/handler/TestTableDeleteFamilyHandler.java AL src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java AL src/test/java/org/apache/hadoop/hbase/master/TestDefaultLoadBalancer.java AL src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java AL src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java AL src/test/java/org/apache/hadoop/hbase/master/TestMasterStatusServlet.java AL src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java AL src/test/java/org/apache/hadoop/hbase/master/TestMasterZKSessionRecovery.java AL src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java AL 
src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java AL src/test/java/org/apache/hadoop/hbase/master/TestMaster.java AL src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java AL src/test/java/org/apache/hadoop/hbase/master/TestZKBasedOpenCloseRegion.java AL src/test/java/org/apache/hadoop/hbase/master/metrics/TestMasterStatistics.java AL src/test/java/org/apache/hadoop/hbase/master/TestClockSkewDetection.java AL src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java AL src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java AL src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java AL src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java AL src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java AL src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java AL src/test/java/org/apache/hadoop/hbase/master/Mocking.java AL src/test/java/org/apache/hadoop/hbase/monitoring/TestMemoryBoundedLogMessageBuffer.java AL src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java AL src/test/java/org/apache/hadoop/hbase/TestClusterBootOrder.java AL src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java AL src/test/java/org/apache/hadoop/hbase/IntegrationTests.java AL src/test/java/org/apache/hadoop/hbase/ClassFinder.java AL src/test/java/org/apache/hadoop/hbase/SmallTests.java AL src/test/java/org/apache/hadoop/hbase/IntegrationTestDataIngestWithChaosMonkey.java AL src/test/java/org/apache/hadoop/hbase/IntegrationTestDataIngestSlowDeterministic.java AL src/test/java/org/apache/hadoop/hbase/ClusterManager.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java AL 
src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java AL src/test/java/org/apache/hadoop/hbase/regionserver/wal/FaultySequenceFileLogReader.java AL src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollPeriod.java AL src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestCompressor.java AL src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java AL src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java AL src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtilsForTests.java AL src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java AL src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java AL src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestCustomWALEditCodec.java AL src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLRUDictionary.java AL src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplayCompressed.java AL src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java AL src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplitCompressed.java AL src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java AL src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java AL src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogMethods.java AL src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java AL src/test/java/org/apache/hadoop/hbase/regionserver/wal/InstrumentedSequenceFileLogWriter.java AL src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestKeyValueCompression.java AL src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogBench.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionWithCoprocessor.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java AL 
src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java AL src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java AL src/test/java/org/apache/hadoop/hbase/regionserver/NoOpScanPolicyObserver.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestMXBean.java AL src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreLAB.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestBatchHRegionLockingAndWrites.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueSkipListSet.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileBlockCacheSummary.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundConfiguration.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java AL src/test/java/org/apache/hadoop/hbase/regionserver/handler/TestOpenRegionHandler.java AL src/test/java/org/apache/hadoop/hbase/regionserver/handler/TestCloseRegionHandler.java AL src/test/java/org/apache/hadoop/hbase/regionserver/KeyValueScanFixture.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java AL src/test/java/org/apache/hadoop/hbase/regionserver/CheckedArchivingHFileCleaner.java AL 
src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java AL src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestScanDeleteTracker.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWildcardColumnTracker.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionBusyWait.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestHBase7051.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestMiniBatchOperationInProgress.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactSelection.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestParallelPut.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java AL src/test/java/org/apache/hadoop/hbase/regionserver/metrics/TestSchemaMetrics.java AL src/test/java/org/apache/hadoop/hbase/regionserver/metrics/TestSchemaConfigured.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestExplicitColumnTracker.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConsistencyControl.java AL src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java AL 
src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueScanFixture.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionSplitPolicy.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java AL src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestRpcMetrics.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestRSKilledWhenMasterInitializing.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestMasterAddressManager.java AL src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java AL src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java AL src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotTask.java AL src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java AL src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java AL src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java AL src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java AL src/test/java/org/apache/hadoop/hbase/snapshot/TestReferenceRegionHFilesTask.java AL src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java AL src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java AL src/test/java/org/apache/hadoop/hbase/snapshot/TestWALReferenceTask.java AL src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotLogSplitter.java AL 
src/test/java/org/apache/hadoop/hbase/snapshot/TestCopyRecoveredEditsTask.java AL src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java AL src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java AL src/test/java/org/apache/hadoop/hbase/TestSerialization.java AL src/test/java/org/apache/hadoop/hbase/TestInfoServers.java AL src/test/java/org/apache/hadoop/hbase/KeyValueTestUtil.java AL src/test/java/org/apache/hadoop/hbase/TestDrainingServer.java AL src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java AL src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java AL src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java AL src/test/java/org/apache/hadoop/hbase/util/TestRootPath.java AL src/test/java/org/apache/hadoop/hbase/util/TestDynamicClassLoader.java AL src/test/java/org/apache/hadoop/hbase/util/TestBase64.java AL src/test/java/org/apache/hadoop/hbase/util/TestEnvironmentEdgeManager.java AL src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java AL src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java AL src/test/java/org/apache/hadoop/hbase/util/TestSizeBasedThrottler.java AL src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java AL src/test/java/org/apache/hadoop/hbase/util/MockServer.java AL src/test/java/org/apache/hadoop/hbase/util/TestDefaultEnvironmentEdge.java AL src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java AL src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java AL src/test/java/org/apache/hadoop/hbase/util/TestThreads.java AL src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java AL src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java AL src/test/java/org/apache/hadoop/hbase/util/MockRegionServerServices.java AL src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java AL src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGenerator.java AL 
src/test/java/org/apache/hadoop/hbase/util/TestKeying.java AL src/test/java/org/apache/hadoop/hbase/util/TestByteBloomFilter.java AL src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java AL src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java AL src/test/java/org/apache/hadoop/hbase/util/TestPoolMap.java AL src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java AL src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java AL src/test/java/org/apache/hadoop/hbase/util/EnvironmentEdgeManagerTestHelper.java AL src/test/java/org/apache/hadoop/hbase/util/ChaosMonkey.java AL src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java AL src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java AL src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java AL src/test/java/org/apache/hadoop/hbase/util/StoppableImplementation.java AL src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java AL src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckComparator.java AL src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java AL src/test/java/org/apache/hadoop/hbase/util/hbck/HbckTestingUtil.java AL src/test/java/org/apache/hadoop/hbase/util/hbck/TestOfflineMetaRebuildHole.java AL src/test/java/org/apache/hadoop/hbase/util/hbck/TestOfflineMetaRebuildBase.java AL src/test/java/org/apache/hadoop/hbase/util/hbck/TestOfflineMetaRebuildOverlap.java AL src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadEncoded.java AL src/test/java/org/apache/hadoop/hbase/util/TestSortedCopyOnWriteSet.java AL src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java AL src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java AL src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java AL src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java AL src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java AL 
src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java AL src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java AL src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java AL src/test/java/org/apache/hadoop/hbase/util/TestBytes.java AL src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java AL src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java AL src/test/java/org/apache/hadoop/hbase/ipc/TestPBOnWritableRpc.java AL src/test/java/org/apache/hadoop/hbase/ipc/TestProtocolExtension.java AL src/test/java/org/apache/hadoop/hbase/IngestIntegrationTestBase.java AL src/test/java/org/apache/hadoop/hbase/TestFSTableDescriptorForceCreation.java AL src/test/java/org/apache/hadoop/hbase/HBaseCluster.java AL src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSplit.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/TestHLogRecordReader.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormatScan.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/hadoopbackport/TestInputSamplerTool.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/hadoopbackport/TestInputSampler.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/hadoopbackport/TestTotalOrderPartitioner.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/hadoopbackport/TestJarFinder.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/TsvImporterCustomTestMapper.java AL 
src/test/java/org/apache/hadoop/hbase/mapreduce/NMapInputFormat.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/TestSimpleTotalOrderPartitioner.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/MapreduceTestingShim.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan2.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan1.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java AL src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java AL src/test/java/org/apache/hadoop/hbase/ResourceChecker.java AL src/test/java/org/apache/hadoop/hbase/TestServerName.java AL src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java AL src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java AL src/test/java/org/apache/hadoop/hbase/rest/TestRESTMetrics.java AL src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java AL src/test/java/org/apache/hadoop/hbase/rest/TestRowResource.java AL src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java AL src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java AL src/test/java/org/apache/hadoop/hbase/rest/TestGZIPResponseWrapper.java AL src/test/java/org/apache/hadoop/hbase/rest/TestGzipFilter.java AL src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java AL src/test/java/org/apache/hadoop/hbase/rest/model/TestTableInfoModel.java AL src/test/java/org/apache/hadoop/hbase/rest/model/TestTableListModel.java AL 
src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java AL src/test/java/org/apache/hadoop/hbase/rest/model/TestCellSetModel.java AL src/test/java/org/apache/hadoop/hbase/rest/model/TestVersionModel.java AL src/test/java/org/apache/hadoop/hbase/rest/model/TestRowModel.java AL src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java AL src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java AL src/test/java/org/apache/hadoop/hbase/rest/model/TestTableRegionModel.java AL src/test/java/org/apache/hadoop/hbase/rest/model/TestCellModel.java AL src/test/java/org/apache/hadoop/hbase/rest/model/TestStorageClusterVersionModel.java AL src/test/java/org/apache/hadoop/hbase/rest/model/TestStorageClusterStatusModel.java AL src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java AL src/test/java/org/apache/hadoop/hbase/rest/TestStatusResource.java AL src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteAdmin.java AL src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java AL src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java AL src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteAdminRetries.java AL src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java AL src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java AL src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java AL src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java AL src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java AL src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java AL src/test/java/org/apache/hadoop/hbase/replication/TestReplicationQueueFailoverCompressed.java AL src/test/java/org/apache/hadoop/hbase/replication/TestReplicationZookeeper.java AL 
src/test/java/org/apache/hadoop/hbase/replication/ReplicationSourceDummy.java AL src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java AL src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java AL src/test/java/org/apache/hadoop/hbase/replication/TestReplicationQueueFailover.java AL src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java AL src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java AL src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java AL src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java AL src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java AL src/test/java/org/apache/hadoop/hbase/filter/TestNullComparator.java AL src/test/java/org/apache/hadoop/hbase/filter/TestColumnCountGetFilter.java AL src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java AL src/test/java/org/apache/hadoop/hbase/filter/TestPageFilter.java AL src/test/java/org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java AL src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java AL src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java AL src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java AL src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java AL src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java AL src/test/java/org/apache/hadoop/hbase/filter/TestRandomRowFilter.java AL src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java AL src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilter.java AL src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java AL src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java AL src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java AL 
src/test/java/org/apache/hadoop/hbase/filter/TestPrefixFilter.java AL src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java AL src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java AL src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/NanoTimer.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/TestCachedBlockQueue.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/KVGenerator.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/RandomDistribution.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheColumnFamilySummary.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderV1.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/RandomSeek.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/KeySampler.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java AL 
src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/slab/TestSlabCache.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/slab/TestSlab.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/slab/TestSingleSizeCache.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java AL src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java AL src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java AL src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java AL src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java AL src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java AL src/test/java/org/apache/hadoop/hbase/io/encoding/RedundantKVGenerator.java AL src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java AL src/test/java/org/apache/hadoop/hbase/io/encoding/TestUpgradeFromHFileV1ToEncoding.java AL src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java AL src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java AL src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java AL src/test/java/org/apache/hadoop/hbase/io/TestImmutableBytesWritable.java AL src/test/java/org/apache/hadoop/hbase/avro/TestAvroUtil.java AL src/test/java/org/apache/hadoop/hbase/avro/TestAvroServer.java AL src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java AL src/test/java/org/apache/hadoop/hbase/TestHRegionLocation.java AL src/test/java/org/apache/hadoop/hbase/ClassTestFinder.java AL src/test/java/org/apache/hadoop/hbase/HServerLoad092.java AL src/test/java/org/apache/hadoop/hbase/TestHServerAddress.java AL src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java AL src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKTable.java AL 
src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperNodeTracker.java AL src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKTableReadOnly.java AL src/test/java/org/apache/hadoop/hbase/zookeeper/TestRecoverableZooKeeper.java AL src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKLeaderManager.java AL src/test/java/org/apache/hadoop/hbase/zookeeper/TestHQuorumPeer.java AL src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperMainServerArg.java AL src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java AL src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java AL src/test/java/org/apache/hadoop/hbase/MultithreadedTestUtil.java AL src/test/java/org/apache/hadoop/hbase/ResourceCheckerJUnitRule.java AL src/test/java/org/apache/hadoop/hbase/metrics/TestExactCounterMetric.java AL src/test/java/org/apache/hadoop/hbase/metrics/file/TestTimeStampingMetricsContext.java AL src/test/java/org/apache/hadoop/hbase/metrics/TestMetricsHistogram.java AL src/test/java/org/apache/hadoop/hbase/metrics/TestExponentiallyDecayingSample.java AL src/test/java/org/apache/hadoop/hbase/metrics/TestMetricsMBeanBase.java AL src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java AL src/test/java/org/apache/hadoop/hbase/TestHServerInfo.java AL src/test/java/org/apache/hadoop/hbase/constraint/AllPassConstraint.java AL src/test/java/org/apache/hadoop/hbase/constraint/AllFailConstraint.java AL src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java AL src/test/java/org/apache/hadoop/hbase/constraint/TestConstraints.java AL src/test/java/org/apache/hadoop/hbase/constraint/WorksConstraint.java AL src/test/java/org/apache/hadoop/hbase/constraint/CheckConfigurationConstraint.java AL src/test/java/org/apache/hadoop/hbase/constraint/RuntimeFailConstraint.java AL src/test/java/org/apache/hadoop/hbase/TestClassFinder.java AL src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java AL 
src/test/java/org/apache/hadoop/hbase/thrift/TestCallQueue.java AL src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java AL src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java AL src/test/java/org/apache/hadoop/hbase/catalog/TestCatalogTracker.java AL src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditor.java AL src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditorNoCluster.java AL src/test/java/org/apache/hadoop/hbase/TestHBaseFileSystem.java AL src/test/java/org/apache/hadoop/hbase/TestFullLogReconstruction.java AL src/test/java/org/apache/hadoop/hbase/TimestampTestBase.java AL src/test/java/org/apache/hadoop/hbase/MediumTests.java AL src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java AL src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java AL src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java AL src/test/java/org/apache/hadoop/hbase/client/TestAttributes.java AL src/test/java/org/apache/hadoop/hbase/client/TestResult.java AL src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java AL src/test/java/org/apache/hadoop/hbase/client/TestScan.java AL src/test/java/org/apache/hadoop/hbase/client/TestShell.java AL src/test/java/org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java AL src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java AL src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideWithSecureRpcEngine.java AL src/test/java/org/apache/hadoop/hbase/client/TestConnectionUtils.java AL src/test/java/org/apache/hadoop/hbase/client/TestCoprocessorHConnection.java AL src/test/java/org/apache/hadoop/hbase/client/TestMetaScanner.java AL src/test/java/org/apache/hadoop/hbase/client/TestHCM.java AL src/test/java/org/apache/hadoop/hbase/client/TestHTablePool.java AL src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java AL src/test/java/org/apache/hadoop/hbase/client/TestSnapshotsFromAdmin.java AL 
src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java AL src/test/java/org/apache/hadoop/hbase/client/TestHTableUtil.java AL src/test/java/org/apache/hadoop/hbase/client/TestOperation.java AL src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.java AL src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java AL src/test/java/org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClient.java AL src/test/java/org/apache/hadoop/hbase/client/TestHConnection.java AL src/test/java/org/apache/hadoop/hbase/client/HConnectionTestingUtility.java AL src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java AL src/test/java/org/apache/hadoop/hbase/client/TestMetaMigrationRemovingHTD.java AL src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideWithCoprocessor.java AL src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java AL src/test/java/org/apache/hadoop/hbase/client/TestGet.java AL src/test/java/org/apache/hadoop/hbase/client/TestPutDotHas.java AL src/test/java/org/apache/hadoop/hbase/MapFilePerformanceEvaluation.java AL src/saveVersion.sh AL src/examples/healthcheck/healthcheck.sh N src/examples/README.txt AL src/examples/thrift2/DemoClient.py AL src/examples/thrift2/DemoClient.java AL src/examples/mapreduce/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java AL src/examples/mapreduce/org/apache/hadoop/hbase/mapreduce/SampleUploader.java AL src/examples/mapreduce/index-builder-setup.rb N src/examples/thrift/README.txt AL src/examples/thrift/DemoClient.rb AL src/examples/thrift/DemoClient.py AL src/examples/thrift/Makefile AL src/examples/thrift/DemoClient.cpp AL src/examples/thrift/DemoClient.php AL src/examples/thrift/DemoClient.pl AL src/examples/thrift/DemoClient.java AL src/packages/build.xml AL src/packages/update-hbase-env.sh AL src/packages/deb/hbase.control/postrm AL src/packages/deb/hbase.control/preinst AL src/packages/deb/hbase.control/prerm AL 
src/packages/deb/hbase.control/control AL src/packages/deb/hbase.control/postinst !????? src/packages/deb/hbase.control/conffile AL src/packages/deb/init.d/hbase-master AL src/packages/deb/init.d/hbase-regionserver AL src/packages/deb/conf-pseudo.control/prerm AL src/packages/deb/conf-pseudo.control/control AL src/packages/deb/conf-pseudo.control/postinst AL src/packages/deb/conf-pseudo.control/conffile AL src/packages/rpm/init.d/hbase-master AL src/packages/rpm/init.d/hbase-regionserver AL src/packages/rpm/spec/hbase.spec AL src/packages/rpm/spec/conf-pseudo.spec AL src/packages/conf-pseudo/hbase-site.xml AL src/assembly/all.xml AL src/main/xslt/configuration_to_docbook_section.xsl AL src/main/resources/hbase-webapps/master/tablesDetailed.jsp AL src/main/resources/hbase-webapps/master/index.html AL src/main/resources/hbase-webapps/master/table.jsp AL src/main/resources/hbase-webapps/master/snapshot.jsp AL src/main/resources/hbase-webapps/master/zk.jsp AL src/main/resources/hbase-webapps/master/master.jsp AL src/main/resources/hbase-webapps/regionserver/index.html AL src/main/resources/hbase-webapps/regionserver/regionserver.jsp AL src/main/resources/hbase-webapps/rest/index.html AL src/main/resources/hbase-webapps/rest/rest.jsp B src/main/resources/hbase-webapps/static/hbase_logo_med.gif B src/main/resources/hbase-webapps/static/favicon.ico B src/main/resources/hbase-webapps/static/hbase_logo.png AL src/main/resources/hbase-webapps/static/hbase.css AL src/main/resources/hbase-webapps/thrift/index.html AL src/main/resources/hbase-webapps/thrift/thrift.jsp AL src/main/resources/hbase-default.xml AL src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift AL src/main/resources/org/apache/hadoop/hbase/mapred/RowCounter_Counters.properties AL src/main/resources/org/apache/hadoop/hbase/mapreduce/RowCounter_Counters.properties AL src/main/resources/org/apache/hadoop/hbase/rest/protobuf/TableListMessage.proto AL 
src/main/resources/org/apache/hadoop/hbase/rest/protobuf/TableInfoMessage.proto AL src/main/resources/org/apache/hadoop/hbase/rest/protobuf/CellSetMessage.proto AL src/main/resources/org/apache/hadoop/hbase/rest/protobuf/CellMessage.proto AL src/main/resources/org/apache/hadoop/hbase/rest/protobuf/StorageClusterStatusMessage.proto AL src/main/resources/org/apache/hadoop/hbase/rest/protobuf/ColumnSchemaMessage.proto AL src/main/resources/org/apache/hadoop/hbase/rest/protobuf/TableSchemaMessage.proto AL src/main/resources/org/apache/hadoop/hbase/rest/protobuf/ScannerMessage.proto AL src/main/resources/org/apache/hadoop/hbase/rest/protobuf/VersionMessage.proto AL src/main/resources/org/apache/hadoop/hbase/rest/XMLSchema.xsd AL src/main/resources/org/apache/hadoop/hbase/thrift/Hbase.thrift AL src/main/javadoc/overview.html AL src/main/javadoc/org/apache/hadoop/hbase/ipc/package.html AL src/main/javadoc/org/apache/hadoop/hbase/replication/package.html AL src/main/javadoc/org/apache/hadoop/hbase/io/hfile/package.html AL src/main/javadoc/org/apache/hadoop/hbase/thrift/doc-files/index.html AL src/main/javadoc/org/apache/hadoop/hbase/thrift/doc-files/Hbase.html AL src/main/javadoc/org/apache/hadoop/hbase/thrift/doc-files/style.css AL src/main/javadoc/org/apache/hadoop/hbase/thrift/package.html AL src/main/ruby/shell/commands.rb AL src/main/ruby/shell/formatter.rb AL src/main/ruby/shell/commands/alter_status.rb AL src/main/ruby/shell/commands/get_counter.rb AL src/main/ruby/shell/commands/assign.rb AL src/main/ruby/shell/commands/move.rb AL src/main/ruby/shell/commands/disable_peer.rb AL src/main/ruby/shell/commands/zk_dump.rb AL src/main/ruby/shell/commands/enable.rb AL src/main/ruby/shell/commands/whoami.rb AL src/main/ruby/shell/commands/major_compact.rb AL src/main/ruby/shell/commands/count.rb AL src/main/ruby/shell/commands/user_permission.rb AL src/main/ruby/shell/commands/alter_async.rb AL src/main/ruby/shell/commands/deleteall.rb AL 
src/main/ruby/shell/commands/drop_all.rb AL src/main/ruby/shell/commands/snapshot.rb AL src/main/ruby/shell/commands/drop.rb AL src/main/ruby/shell/commands/list_snapshots.rb AL src/main/ruby/shell/commands/show_filters.rb AL src/main/ruby/shell/commands/restore_snapshot.rb AL src/main/ruby/shell/commands/get.rb AL src/main/ruby/shell/commands/revoke.rb AL src/main/ruby/shell/commands/truncate.rb AL src/main/ruby/shell/commands/incr.rb AL src/main/ruby/shell/commands/enable_peer.rb AL src/main/ruby/shell/commands/balancer.rb AL src/main/ruby/shell/commands/compact.rb AL src/main/ruby/shell/commands/disable.rb AL src/main/ruby/shell/commands/unassign.rb AL src/main/ruby/shell/commands/delete_snapshot.rb AL src/main/ruby/shell/commands/alter.rb AL src/main/ruby/shell/commands/grant.rb AL src/main/ruby/shell/commands/delete.rb AL src/main/ruby/shell/commands/version.rb AL src/main/ruby/shell/commands/clone_snapshot.rb AL src/main/ruby/shell/commands/add_peer.rb AL src/main/ruby/shell/commands/list_peers.rb AL src/main/ruby/shell/commands/status.rb AL src/main/ruby/shell/commands/is_disabled.rb AL src/main/ruby/shell/commands/list_replicated_tables.rb AL src/main/ruby/shell/commands/is_enabled.rb AL src/main/ruby/shell/commands/hlog_roll.rb AL src/main/ruby/shell/commands/create.rb AL src/main/ruby/shell/commands/remove_peer.rb AL src/main/ruby/shell/commands/exists.rb AL src/main/ruby/shell/commands/put.rb AL src/main/ruby/shell/commands/disable_all.rb AL src/main/ruby/shell/commands/describe.rb AL src/main/ruby/shell/commands/stop_replication.rb AL src/main/ruby/shell/commands/split.rb AL src/main/ruby/shell/commands/scan.rb AL src/main/ruby/shell/commands/close_region.rb AL src/main/ruby/shell/commands/enable_all.rb AL src/main/ruby/shell/commands/flush.rb AL src/main/ruby/shell/commands/list.rb AL src/main/ruby/shell/commands/balance_switch.rb AL src/main/ruby/shell/commands/start_replication.rb AL src/main/ruby/hbase.rb AL src/main/ruby/shell.rb AL 
src/main/ruby/hbase/hbase.rb AL src/main/ruby/hbase/security.rb AL src/main/ruby/hbase/replication_admin.rb AL src/main/ruby/hbase/table.rb AL src/main/ruby/hbase/admin.rb AL src/main/ruby/irb/hirb.rb AL src/main/protobuf/ErrorHandling.proto AL src/main/protobuf/hbase.proto !????? src/main/avro/hbase.avpr AL src/main/python/hbase/merge_conf.py AL src/main/jamon/org/apache/hadoop/hbase/tmpl/master/BackupMasterStatusTmpl.jamon AL src/main/jamon/org/apache/hadoop/hbase/tmpl/master/AssignmentManagerStatusTmpl.jamon AL src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon AL src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon AL src/main/jamon/org/apache/hadoop/hbase/tmpl/common/TaskMonitorTmpl.jamon AL src/main/java/org/apache/hadoop/hbase/DaemonThreadFactory.java AL src/main/java/org/apache/hadoop/hbase/HDFSBlocksDistribution.java AL src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java AL src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java AL src/main/java/org/apache/hadoop/hbase/errorhandling/TimeoutExceptionInjector.java AL src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionDispatcher.java AL src/main/java/org/apache/hadoop/hbase/errorhandling/TimeoutException.java AL src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionListener.java AL src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionSnare.java AL src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignException.java AL src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java AL src/main/java/org/apache/hadoop/hbase/migration/HRegionInfo090x.java !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnIncrement.java !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumn.java !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TTimeRange.java !????? 
src/main/java/org/apache/hadoop/hbase/thrift2/generated/TMutation.java !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIOError.java !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TPut.java !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDurability.java !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TRowMutations.java !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIllegalArgument.java !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIncrement.java !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TResult.java !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDelete.java !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDeleteType.java !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnValue.java AL src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java AL src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java AL src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java AL src/main/java/org/apache/hadoop/hbase/thrift2/package.html AL src/main/java/org/apache/hadoop/hbase/mapred/package-info.java AL src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java AL src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java AL src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java AL src/main/java/org/apache/hadoop/hbase/mapred/TableMap.java AL src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java AL src/main/java/org/apache/hadoop/hbase/mapred/TableSplit.java AL src/main/java/org/apache/hadoop/hbase/mapred/Driver.java AL src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java AL src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java AL 
src/main/java/org/apache/hadoop/hbase/mapred/TableReduce.java AL src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java AL src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java AL src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java AL src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java AL src/main/java/org/apache/hadoop/hbase/mapred/RowCounter.java AL src/main/java/org/apache/hadoop/hbase/DroppedSnapshotException.java AL src/main/java/org/apache/hadoop/hbase/UnknownRegionException.java AL src/main/java/org/apache/hadoop/hbase/TableNotEnabledException.java AL src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java AL src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureUtil.java AL src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMember.java AL src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinatorRpcs.java AL src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java AL src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinatorRpcs.java AL src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java AL src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java AL src/main/java/org/apache/hadoop/hbase/procedure/SubprocedureFactory.java AL src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMemberRpcs.java AL src/main/java/org/apache/hadoop/hbase/CoprocessorEnvironment.java AL src/main/java/org/apache/hadoop/hbase/security/KerberosInfo.java AL src/main/java/org/apache/hadoop/hbase/security/User.java AL src/main/java/org/apache/hadoop/hbase/security/TokenInfo.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/package-info.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerObserver.java AL 
src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteProtocol.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteResponse.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationProtocol.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/MasterCoprocessorEnvironment.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/WALObserver.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/ObserverContext.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateProtocol.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorException.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/WALCoprocessorEnvironment.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/SecureBulkLoadClient.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEndpointCoprocessor.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerCoprocessorEnvironment.java AL src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java AL src/main/java/org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.java AL src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java AL src/main/java/org/apache/hadoop/hbase/master/LoadBalancerFactory.java AL src/main/java/org/apache/hadoop/hbase/master/BulkAssigner.java AL 
src/main/java/org/apache/hadoop/hbase/master/ServerAndLoad.java AL src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java AL src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java AL src/main/java/org/apache/hadoop/hbase/master/UnAssignCallable.java AL src/main/java/org/apache/hadoop/hbase/master/MXBean.java AL src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java AL src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java AL src/main/java/org/apache/hadoop/hbase/master/RegionPlan.java AL src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotLogCleaner.java AL src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java AL src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java AL src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotFileCache.java AL src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java AL src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java AL src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java AL src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java AL src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java AL src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java AL src/main/java/org/apache/hadoop/hbase/master/AssignCallable.java AL src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java AL src/main/java/org/apache/hadoop/hbase/master/handler/MetaServerShutdownHandler.java AL src/main/java/org/apache/hadoop/hbase/master/handler/DisableTableHandler.java AL src/main/java/org/apache/hadoop/hbase/master/handler/TableDeleteFamilyHandler.java AL src/main/java/org/apache/hadoop/hbase/master/handler/ClosedRegionHandler.java AL src/main/java/org/apache/hadoop/hbase/master/handler/ModifyTableHandler.java AL 
src/main/java/org/apache/hadoop/hbase/master/handler/TableAddFamilyHandler.java AL src/main/java/org/apache/hadoop/hbase/master/handler/TableModifyFamilyHandler.java AL src/main/java/org/apache/hadoop/hbase/master/handler/CreateTableHandler.java AL src/main/java/org/apache/hadoop/hbase/master/handler/DeleteTableHandler.java AL src/main/java/org/apache/hadoop/hbase/master/handler/ServerShutdownHandler.java AL src/main/java/org/apache/hadoop/hbase/master/handler/SplitRegionHandler.java AL src/main/java/org/apache/hadoop/hbase/master/handler/OpenedRegionHandler.java AL src/main/java/org/apache/hadoop/hbase/master/handler/TotesHRegionInfo.java AL src/main/java/org/apache/hadoop/hbase/master/handler/TableEventHandler.java AL src/main/java/org/apache/hadoop/hbase/master/handler/EnableTableHandler.java AL src/main/java/org/apache/hadoop/hbase/master/DeadServer.java AL src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java AL src/main/java/org/apache/hadoop/hbase/master/MasterServices.java AL src/main/java/org/apache/hadoop/hbase/master/BulkReOpen.java AL src/main/java/org/apache/hadoop/hbase/master/ServerManager.java AL src/main/java/org/apache/hadoop/hbase/master/HMaster.java AL src/main/java/org/apache/hadoop/hbase/master/MXBeanImpl.java AL src/main/java/org/apache/hadoop/hbase/master/DefaultLoadBalancer.java AL src/main/java/org/apache/hadoop/hbase/master/metrics/MasterMetrics.java AL src/main/java/org/apache/hadoop/hbase/master/metrics/MasterStatistics.java AL src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java AL src/main/java/org/apache/hadoop/hbase/master/cleaner/FileCleanerDelegate.java AL src/main/java/org/apache/hadoop/hbase/master/cleaner/BaseLogCleanerDelegate.java AL src/main/java/org/apache/hadoop/hbase/master/cleaner/BaseFileCleanerDelegate.java AL src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java AL src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileLinkCleaner.java AL 
src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveHFileCleaner.java AL src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java AL src/main/java/org/apache/hadoop/hbase/master/cleaner/BaseHFileCleanerDelegate.java AL src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveLogCleaner.java AL src/main/java/org/apache/hadoop/hbase/master/MasterDumpServlet.java AL src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java AL src/main/java/org/apache/hadoop/hbase/master/MasterStatusServlet.java AL src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java AL src/main/java/org/apache/hadoop/hbase/RegionException.java AL src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java AL src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java AL src/main/java/org/apache/hadoop/hbase/monitoring/MemoryBoundedLogMessageBuffer.java AL src/main/java/org/apache/hadoop/hbase/monitoring/LogMonitoring.java AL src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java AL src/main/java/org/apache/hadoop/hbase/monitoring/ThreadMonitoring.java AL src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java AL src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.java AL src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java AL src/main/java/org/apache/hadoop/hbase/monitoring/StateDumpServlet.java AL src/main/java/org/apache/hadoop/hbase/HealthReport.java AL src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java AL src/main/java/org/apache/hadoop/hbase/ServerName.java AL src/main/java/org/apache/hadoop/hbase/Coprocessor.java AL src/main/java/org/apache/hadoop/hbase/BaseConfigurable.java AL src/main/java/org/apache/hadoop/hbase/VersionAnnotation.java AL src/main/java/org/apache/hadoop/hbase/UnknownRowLockException.java AL src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java AL 
src/main/java/org/apache/hadoop/hbase/regionserver/ReplicationSourceService.java AL src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerStoppedException.java AL src/main/java/org/apache/hadoop/hbase/regionserver/RSStatusServlet.java AL src/main/java/org/apache/hadoop/hbase/regionserver/ReplicationService.java AL src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditCodec.java AL src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogPrettyPrinter.java AL src/main/java/org/apache/hadoop/hbase/regionserver/wal/LRUDictionary.java AL src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogFileSystem.java AL src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java AL src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.java AL src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java AL src/main/java/org/apache/hadoop/hbase/regionserver/wal/OrphanHLogAfterSplitException.java AL src/main/java/org/apache/hadoop/hbase/regionserver/wal/KeyValueCompression.java AL src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogKey.java AL src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java AL src/main/java/org/apache/hadoop/hbase/regionserver/wal/Dictionary.java AL src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogReader.java AL src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java AL src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java AL src/main/java/org/apache/hadoop/hbase/regionserver/wal/FailedLogCloseException.java AL src/main/java/org/apache/hadoop/hbase/regionserver/wal/Compressor.java AL src/main/java/org/apache/hadoop/hbase/regionserver/wal/CompressionContext.java AL src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java AL src/main/java/org/apache/hadoop/hbase/regionserver/ScanDeleteTracker.java AL src/main/java/org/apache/hadoop/hbase/regionserver/LruHashMap.java AL 
src/main/java/org/apache/hadoop/hbase/regionserver/RSDumpServlet.java AL src/main/java/org/apache/hadoop/hbase/regionserver/ColumnCount.java AL src/main/java/org/apache/hadoop/hbase/regionserver/SplitTransaction.java AL src/main/java/org/apache/hadoop/hbase/regionserver/RegionAlreadyInTransitionException.java AL src/main/java/org/apache/hadoop/hbase/regionserver/Store.java AL src/main/java/org/apache/hadoop/hbase/regionserver/InternalScan.java AL src/main/java/org/apache/hadoop/hbase/regionserver/ReplicationSinkService.java AL src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java AL src/main/java/org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.java AL src/main/java/org/apache/hadoop/hbase/regionserver/MXBean.java AL src/main/java/org/apache/hadoop/hbase/regionserver/InternalScanner.java AL src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java AL src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java AL src/main/java/org/apache/hadoop/hbase/regionserver/CompactionRequestor.java AL src/main/java/org/apache/hadoop/hbase/regionserver/RegionScanner.java AL src/main/java/org/apache/hadoop/hbase/regionserver/DeleteTracker.java AL src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConsistencyControl.java AL src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java AL src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java AL src/main/java/org/apache/hadoop/hbase/regionserver/ConstantSizeRegionSplitPolicy.java AL src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java AL src/main/java/org/apache/hadoop/hbase/regionserver/CompoundConfiguration.java AL src/main/java/org/apache/hadoop/hbase/regionserver/WrongRegionException.java AL src/main/java/org/apache/hadoop/hbase/regionserver/StoreFlusher.java AL src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java AL 
src/main/java/org/apache/hadoop/hbase/regionserver/handler/CloseMetaHandler.java AL src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenRootHandler.java AL src/main/java/org/apache/hadoop/hbase/regionserver/handler/CloseRootHandler.java AL src/main/java/org/apache/hadoop/hbase/regionserver/handler/CloseRegionHandler.java AL src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenMetaHandler.java AL src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java AL src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java AL src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java AL src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLAB.java AL src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionProgress.java AL src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactSelection.java AL src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionRequest.java AL src/main/java/org/apache/hadoop/hbase/regionserver/FlushRequester.java AL src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java AL src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java AL src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerAccounting.java AL src/main/java/org/apache/hadoop/hbase/regionserver/ScanWildcardColumnTracker.java AL src/main/java/org/apache/hadoop/hbase/regionserver/MetaLogRoller.java AL src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java AL src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java AL src/main/java/org/apache/hadoop/hbase/regionserver/OnlineRegions.java AL src/main/java/org/apache/hadoop/hbase/regionserver/HRegionThriftServer.java AL src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java AL src/main/java/org/apache/hadoop/hbase/regionserver/ExplicitColumnTracker.java AL src/main/java/org/apache/hadoop/hbase/regionserver/ScanType.java AL 
src/main/java/org/apache/hadoop/hbase/regionserver/DisabledRegionSplitPolicy.java AL src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java AL src/main/java/org/apache/hadoop/hbase/regionserver/DebugPrint.java AL src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java AL src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java AL src/main/java/org/apache/hadoop/hbase/regionserver/MiniBatchOperationInProgress.java AL src/main/java/org/apache/hadoop/hbase/regionserver/MXBeanImpl.java AL src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java AL src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java AL src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java AL src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueSkipListSet.java AL src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java AL src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java AL src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java AL src/main/java/org/apache/hadoop/hbase/regionserver/OperationStatus.java AL src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerDynamicStatistics.java AL src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerDynamicMetrics.java AL src/main/java/org/apache/hadoop/hbase/regionserver/metrics/SchemaConfigured.java AL src/main/java/org/apache/hadoop/hbase/regionserver/metrics/OperationMetrics.java AL src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionMetricsStorage.java AL src/main/java/org/apache/hadoop/hbase/regionserver/metrics/SchemaMetrics.java AL src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java AL src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerStatistics.java AL src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java AL 
src/main/java/org/apache/hadoop/hbase/regionserver/LeaseException.java AL src/main/java/org/apache/hadoop/hbase/regionserver/KeyPrefixRegionSplitPolicy.java AL src/main/java/org/apache/hadoop/hbase/regionserver/LeaseListener.java AL src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java AL src/main/java/org/apache/hadoop/hbase/regionserver/ColumnTracker.java AL src/main/java/org/apache/hadoop/hbase/regionserver/DelimitedKeyPrefixRegionSplitPolicy.java AL src/main/java/org/apache/hadoop/hbase/regionserver/ChangedReadersObserver.java AL src/main/java/org/apache/hadoop/hbase/regionserver/NoSuchColumnFamilyException.java AL src/main/java/org/apache/hadoop/hbase/regionserver/NonLazyKeyValueScanner.java AL src/main/java/org/apache/hadoop/hbase/regionserver/RegionOpeningState.java AL src/main/java/org/apache/hadoop/hbase/regionserver/Compactor.java AL src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java AL src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java AL src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotException.java AL src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java AL src/main/java/org/apache/hadoop/hbase/snapshot/CopyRecoveredEditsTask.java AL src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java AL src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotLogSplitter.java AL src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java AL src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.java AL src/main/java/org/apache/hadoop/hbase/snapshot/TablePartiallyOpenException.java AL src/main/java/org/apache/hadoop/hbase/snapshot/ReferenceRegionHFilesTask.java AL src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotTask.java AL src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotException.java AL src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.java AL 
src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java AL src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java AL src/main/java/org/apache/hadoop/hbase/snapshot/TakeSnapshotUtils.java AL src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java AL src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java AL src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.java AL src/main/java/org/apache/hadoop/hbase/snapshot/HSnapshotDescription.java AL src/main/java/org/apache/hadoop/hbase/snapshot/TableInfoCopyTask.java AL src/main/java/org/apache/hadoop/hbase/snapshot/UnknownSnapshotException.java AL src/main/java/org/apache/hadoop/hbase/snapshot/ReferenceServerWALsTask.java AL src/main/java/org/apache/hadoop/hbase/HBaseIOException.java AL src/main/java/org/apache/hadoop/hbase/MasterAddressTracker.java AL src/main/java/org/apache/hadoop/hbase/PleaseHoldException.java AL src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterBase.java AL src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java AL src/main/java/org/apache/hadoop/hbase/util/BloomFilter.java AL src/main/java/org/apache/hadoop/hbase/util/HashedBytes.java AL src/main/java/org/apache/hadoop/hbase/util/IncrementingEnvironmentEdge.java AL src/main/java/org/apache/hadoop/hbase/util/Objects.java AL src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java AL src/main/java/org/apache/hadoop/hbase/util/Threads.java AL src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java AL src/main/java/org/apache/hadoop/hbase/util/ClassSize.java AL src/main/java/org/apache/hadoop/hbase/util/SizeBasedThrottler.java AL src/main/java/org/apache/hadoop/hbase/util/ManualEnvironmentEdge.java AL src/main/java/org/apache/hadoop/hbase/util/EnvironmentEdge.java AL src/main/java/org/apache/hadoop/hbase/util/RetryCounter.java AL src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilter.java AL 
src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java AL src/main/java/org/apache/hadoop/hbase/util/HMerge.java AL src/main/java/org/apache/hadoop/hbase/util/ModifyRegionUtils.java AL src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java AL src/main/java/org/apache/hadoop/hbase/util/CollectionBackedScanner.java AL src/main/java/org/apache/hadoop/hbase/util/Sleeper.java AL src/main/java/org/apache/hadoop/hbase/util/HttpServerUtil.java AL src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java AL src/main/java/org/apache/hadoop/hbase/util/Merge.java AL src/main/java/org/apache/hadoop/hbase/util/CancelableProgressable.java AL src/main/java/org/apache/hadoop/hbase/util/FileSystemVersionException.java AL src/main/java/org/apache/hadoop/hbase/util/FSUtils.java AL src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java AL src/main/java/org/apache/hadoop/hbase/util/KeyRange.java AL src/main/java/org/apache/hadoop/hbase/util/MurmurHash.java AL src/main/java/org/apache/hadoop/hbase/util/HFileArchiveUtil.java AL src/main/java/org/apache/hadoop/hbase/util/BloomFilterBase.java AL src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java AL src/main/java/org/apache/hadoop/hbase/util/DefaultEnvironmentEdge.java AL src/main/java/org/apache/hadoop/hbase/util/InfoServer.java AL src/main/java/org/apache/hadoop/hbase/util/JenkinsHash.java AL src/main/java/org/apache/hadoop/hbase/util/ProtoUtil.java AL src/main/java/org/apache/hadoop/hbase/util/PoolMap.java AL src/main/java/org/apache/hadoop/hbase/util/RetryCounterFactory.java AL src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java AL src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java AL src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java AL src/main/java/org/apache/hadoop/hbase/util/MetaUtils.java AL src/main/java/org/apache/hadoop/hbase/util/JvmVersion.java AL src/main/java/org/apache/hadoop/hbase/util/RegionSplitCalculator.java AL 
src/main/java/org/apache/hadoop/hbase/util/Pair.java AL src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterWriter.java AL src/main/java/org/apache/hadoop/hbase/util/BloomFilterWriter.java AL src/main/java/org/apache/hadoop/hbase/util/Bytes.java AL src/main/java/org/apache/hadoop/hbase/util/Classes.java AL src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java AL src/main/java/org/apache/hadoop/hbase/util/ChecksumFactory.java AL src/main/java/org/apache/hadoop/hbase/util/GetJavaProperty.java AL src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java AL src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java AL src/main/java/org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.java AL src/main/java/org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.java AL src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java AL src/main/java/org/apache/hadoop/hbase/util/Methods.java AL src/main/java/org/apache/hadoop/hbase/util/MapreduceDependencyClasspathTool.java AL src/main/java/org/apache/hadoop/hbase/util/Strings.java AL src/main/java/org/apache/hadoop/hbase/util/FSVisitor.java AL src/main/java/org/apache/hadoop/hbase/util/Writables.java AL src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java AL src/main/java/org/apache/hadoop/hbase/util/ByteBufferOutputStream.java AL src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java AL src/main/java/org/apache/hadoop/hbase/util/IdLock.java AL src/main/java/org/apache/hadoop/hbase/util/HasThread.java AL src/main/java/org/apache/hadoop/hbase/util/PairOfSameType.java AL src/main/java/org/apache/hadoop/hbase/util/EnvironmentEdgeManager.java AL src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java AL src/main/java/org/apache/hadoop/hbase/util/ShutdownHookManager.java AL src/main/java/org/apache/hadoop/hbase/util/SortedCopyOnWriteSet.java AL src/main/java/org/apache/hadoop/hbase/util/Hash.java AL 
src/main/java/org/apache/hadoop/hbase/util/Keying.java AL src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java AL src/main/java/org/apache/hadoop/hbase/util/Addressing.java AL src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java AL src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java AL src/main/java/org/apache/hadoop/hbase/util/Base64.java AL src/main/java/org/apache/hadoop/hbase/util/HBaseConfTool.java AL src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java AL src/main/java/org/apache/hadoop/hbase/util/SoftValueSortedMap.java AL src/main/java/org/apache/hadoop/hbase/ipc/ProtocolSignature.java AL src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPCErrorHandler.java AL src/main/java/org/apache/hadoop/hbase/ipc/HMasterInterface.java AL src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPCStatistics.java AL src/main/java/org/apache/hadoop/hbase/ipc/ServerNotRunningYetException.java AL src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java AL src/main/java/org/apache/hadoop/hbase/ipc/RpcCallContext.java AL src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java AL src/main/java/org/apache/hadoop/hbase/ipc/MasterExecRPCInvoker.java AL src/main/java/org/apache/hadoop/hbase/ipc/ResponseFlag.java AL src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java AL src/main/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java AL src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java AL src/main/java/org/apache/hadoop/hbase/ipc/VersionedProtocol.java AL src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java AL src/main/java/org/apache/hadoop/hbase/ipc/HMasterRegionInterface.java AL src/main/java/org/apache/hadoop/hbase/ipc/Delayable.java AL src/main/java/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.java AL src/main/java/org/apache/hadoop/hbase/ipc/Status.java AL src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java AL src/main/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java AL 
src/main/java/org/apache/hadoop/hbase/ipc/RpcEngine.java AL src/main/java/org/apache/hadoop/hbase/ipc/ConnectionHeader.java AL src/main/java/org/apache/hadoop/hbase/ipc/ExecRPCInvoker.java AL src/main/java/org/apache/hadoop/hbase/ipc/RequestContext.java AL src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorProtocol.java AL src/main/java/org/apache/hadoop/hbase/TableDescriptors.java AL src/main/java/org/apache/hadoop/hbase/UnknownScannerException.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableMapper.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/package-info.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputCommitter.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapper.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/hadoopbackport/TotalOrderPartitioner.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/hadoopbackport/JarFinder.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/hadoopbackport/InputSampler.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/TableReducer.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java AL 
src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormat.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSortReducer.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java AL src/main/java/org/apache/hadoop/hbase/mapreduce/HLogInputFormat.java AL src/main/java/org/apache/hadoop/hbase/Chore.java AL src/main/java/org/apache/hadoop/hbase/Server.java AL src/main/java/org/apache/hadoop/hbase/HRegionLocation.java AL src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java AL src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java AL src/main/java/org/apache/hadoop/hbase/rest/RowResource.java AL src/main/java/org/apache/hadoop/hbase/rest/Main.java AL src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java AL src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java AL 
src/main/java/org/apache/hadoop/hbase/rest/ResourceConfig.java AL src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java AL src/main/java/org/apache/hadoop/hbase/rest/ProtobufMessageHandler.java AL src/main/java/org/apache/hadoop/hbase/rest/RootResource.java AL src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java AL src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java AL src/main/java/org/apache/hadoop/hbase/rest/Constants.java AL src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java AL src/main/java/org/apache/hadoop/hbase/rest/TableResource.java AL src/main/java/org/apache/hadoop/hbase/rest/package.html AL src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java AL src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPRequestWrapper.java AL src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPRequestStream.java AL src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPResponseWrapper.java AL src/main/java/org/apache/hadoop/hbase/rest/filter/GzipFilter.java AL src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPResponseStream.java AL src/main/java/org/apache/hadoop/hbase/rest/ExistsResource.java !????? src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/StorageClusterStatusMessage.java !????? src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellMessage.java !????? src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/VersionMessage.java !????? src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ColumnSchemaMessage.java !????? src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableListMessage.java !????? src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableInfoMessage.java !????? src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableSchemaMessage.java !????? src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellSetMessage.java !????? 
src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java AL src/main/java/org/apache/hadoop/hbase/rest/model/CellSetModel.java AL src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java AL src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java AL src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java AL src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java AL src/main/java/org/apache/hadoop/hbase/rest/model/TableInfoModel.java AL src/main/java/org/apache/hadoop/hbase/rest/model/TableRegionModel.java AL src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java AL src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java AL src/main/java/org/apache/hadoop/hbase/rest/model/TableModel.java AL src/main/java/org/apache/hadoop/hbase/rest/model/VersionModel.java AL src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java AL src/main/java/org/apache/hadoop/hbase/rest/model/TableListModel.java AL src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java AL src/main/java/org/apache/hadoop/hbase/rest/ResultGenerator.java AL src/main/java/org/apache/hadoop/hbase/rest/metrics/RESTStatistics.java AL src/main/java/org/apache/hadoop/hbase/rest/metrics/RESTMetrics.java AL src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java AL src/main/java/org/apache/hadoop/hbase/rest/ResourceBase.java AL src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java AL src/main/java/org/apache/hadoop/hbase/rest/provider/JAXBContextResolver.java AL src/main/java/org/apache/hadoop/hbase/rest/provider/producer/PlainTextMessageBodyProducer.java AL src/main/java/org/apache/hadoop/hbase/rest/provider/producer/ProtobufMessageBodyProducer.java AL src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java AL src/main/java/org/apache/hadoop/hbase/rest/client/RemoteAdmin.java AL 
src/main/java/org/apache/hadoop/hbase/rest/client/Client.java AL src/main/java/org/apache/hadoop/hbase/rest/client/Cluster.java AL src/main/java/org/apache/hadoop/hbase/rest/client/Response.java AL src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java AL src/main/java/org/apache/hadoop/hbase/replication/ReplicationZookeeper.java AL src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java AL src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceInterface.java AL src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkMetrics.java AL src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationStatistics.java AL src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationHLogReaderManager.java AL src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java AL src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java AL src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceMetrics.java AL src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java AL src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java AL src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java AL src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java AL src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java AL src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java AL src/main/java/org/apache/hadoop/hbase/filter/InvalidRowFilterException.java AL src/main/java/org/apache/hadoop/hbase/filter/package-info.java AL src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java AL src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java AL src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java AL src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java AL 
src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java AL src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java AL src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java AL src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java AL src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java AL src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java AL src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java AL src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java AL src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java AL src/main/java/org/apache/hadoop/hbase/filter/Filter.java AL src/main/java/org/apache/hadoop/hbase/filter/WritableByteArrayComparable.java AL src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java AL src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java AL src/main/java/org/apache/hadoop/hbase/filter/FilterList.java AL src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java AL src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java AL src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java AL src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java AL src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java AL src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java AL src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java AL src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java AL src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java AL src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java AL src/main/java/org/apache/hadoop/hbase/filter/IncompatibleFilterException.java AL src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java AL src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java AL src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java AL 
src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java AL src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java AL src/main/java/org/apache/hadoop/hbase/tool/WriteSinkCoprocessor.java AL src/main/java/org/apache/hadoop/hbase/tool/Canary.java AL src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java !????? src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java !????? src/main/java/org/apache/hadoop/hbase/protobuf/generated/ErrorHandlingProtos.java AL src/main/java/org/apache/hadoop/hbase/io/CodeToClassAndBack.java AL src/main/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/BoundedRangeFileInputStream.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/CacheStats.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/InvalidHFileException.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/BlockWithScanInfo.java AL 
src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV1.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/SimpleBlockCache.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/slab/SlabItemActionWatcher.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/slab/SlabCache.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/slab/SingleSizeCache.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/slab/Slab.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/InlineBlockWriter.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheColumnFamilySummary.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/DoubleBlockCache.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/ReusableStreamGzipCodec.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/Cacheable.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/CachedBlock.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/CachedBlockQueue.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/CorruptHFileException.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java AL src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java AL src/main/java/org/apache/hadoop/hbase/io/DoubleOutputStream.java AL src/main/java/org/apache/hadoop/hbase/io/TimeRange.java AL src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java AL src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java AL src/main/java/org/apache/hadoop/hbase/io/encoding/CompressionState.java AL src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java AL 
src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java AL src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java AL src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java AL src/main/java/org/apache/hadoop/hbase/io/encoding/EncoderBufferTooSmallException.java AL src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java AL src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java AL src/main/java/org/apache/hadoop/hbase/io/WritableWithSize.java AL src/main/java/org/apache/hadoop/hbase/io/DataOutputOutputStream.java AL src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java AL src/main/java/org/apache/hadoop/hbase/io/HLogLink.java AL src/main/java/org/apache/hadoop/hbase/io/FileLink.java AL src/main/java/org/apache/hadoop/hbase/io/HFileLink.java AL src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java AL src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java AL src/main/java/org/apache/hadoop/hbase/io/HeapSize.java AL src/main/java/org/apache/hadoop/hbase/io/Reference.java AL src/main/java/org/apache/hadoop/hbase/Abortable.java AL src/main/java/org/apache/hadoop/hbase/avro/AvroServer.java AL src/main/java/org/apache/hadoop/hbase/avro/package.html AL src/main/java/org/apache/hadoop/hbase/avro/AvroUtil.java AL src/main/java/org/apache/hadoop/hbase/TableExistsException.java AL src/main/java/org/apache/hadoop/hbase/HServerLoad.java AL src/main/java/org/apache/hadoop/hbase/TableInfoMissingException.java AL src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java AL src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperNodeTracker.java AL src/main/java/org/apache/hadoop/hbase/zookeeper/ZKSplitLog.java AL src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java AL src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperListener.java AL src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java AL 
src/main/java/org/apache/hadoop/hbase/zookeeper/DrainingServerTracker.java AL src/main/java/org/apache/hadoop/hbase/zookeeper/RootRegionTracker.java AL src/main/java/org/apache/hadoop/hbase/zookeeper/ZKServerTool.java AL src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java AL src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java AL src/main/java/org/apache/hadoop/hbase/zookeeper/ClusterId.java AL src/main/java/org/apache/hadoop/hbase/zookeeper/ClusterStatusTracker.java AL src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java AL src/main/java/org/apache/hadoop/hbase/zookeeper/MetaNodeTracker.java AL src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java AL src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTable.java AL src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableReadOnly.java AL src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServerArg.java AL src/main/java/org/apache/hadoop/hbase/zookeeper/ZKAssign.java AL src/main/java/org/apache/hadoop/hbase/zookeeper/ZKConfig.java AL src/main/java/org/apache/hadoop/hbase/YouAreDeadException.java AL src/main/java/org/apache/hadoop/hbase/HRegionInfo.java AL src/main/java/org/apache/hadoop/hbase/ClusterStatus.java AL src/main/java/org/apache/hadoop/hbase/DoNotRetryIOException.java AL src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java AL src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java AL src/main/java/org/apache/hadoop/hbase/EmptyWatcher.java AL src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java AL src/main/java/org/apache/hadoop/hbase/metrics/MetricsMBeanBase.java AL src/main/java/org/apache/hadoop/hbase/metrics/ExactCounterMetric.java AL src/main/java/org/apache/hadoop/hbase/metrics/MetricsString.java AL src/main/java/org/apache/hadoop/hbase/metrics/file/TimeStampingFileContext.java AL src/main/java/org/apache/hadoop/hbase/metrics/PersistentMetricsTimeVaryingRate.java AL 
src/main/java/org/apache/hadoop/hbase/metrics/histogram/MetricsHistogram.java AL src/main/java/org/apache/hadoop/hbase/metrics/HBaseInfo.java AL src/main/java/org/apache/hadoop/hbase/metrics/MetricsRate.java AL src/main/java/org/apache/hadoop/hbase/KeyValue.java AL src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java AL src/main/java/org/apache/hadoop/hbase/codec/Decoder.java AL src/main/java/org/apache/hadoop/hbase/codec/Encoder.java AL src/main/java/org/apache/hadoop/hbase/codec/Codec.java AL src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java AL src/main/java/org/apache/hadoop/hbase/codec/BaseEncoder.java AL src/main/java/org/apache/hadoop/hbase/codec/CodecException.java AL src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java AL src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java AL src/main/java/org/apache/hadoop/hbase/HBaseFileSystem.java AL src/main/java/org/apache/hadoop/hbase/constraint/Constraint.java AL src/main/java/org/apache/hadoop/hbase/constraint/package-info.java AL src/main/java/org/apache/hadoop/hbase/constraint/BaseConstraint.java AL src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java AL src/main/java/org/apache/hadoop/hbase/constraint/ConstraintException.java AL src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java AL src/main/java/org/apache/hadoop/hbase/RemoteExceptionHandler.java AL src/main/java/org/apache/hadoop/hbase/thrift/CallQueue.java AL src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java AL src/main/java/org/apache/hadoop/hbase/thrift/ThriftMetrics.java AL src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/TColumn.java !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/TScan.java !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/ColumnDescriptor.java !????? 
src/main/java/org/apache/hadoop/hbase/thrift/generated/TCell.java !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/AlreadyExists.java !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/TRegionInfo.java !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/IllegalArgument.java !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/TRowResult.java !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/TIncrement.java !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/BatchMutation.java !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/IOError.java !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/Mutation.java AL src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java AL src/main/java/org/apache/hadoop/hbase/thrift/HThreadedSelectorServerArgs.java AL src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java AL src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java AL src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescerMBean.java AL src/main/java/org/apache/hadoop/hbase/thrift/HbaseHandlerMetricsProxy.java AL src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java AL src/main/java/org/apache/hadoop/hbase/TableNotFoundException.java AL src/main/java/org/apache/hadoop/hbase/HServerAddress.java AL src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java AL src/main/java/org/apache/hadoop/hbase/catalog/MetaMigrationRemovingHTD.java AL src/main/java/org/apache/hadoop/hbase/catalog/CatalogTracker.java AL src/main/java/org/apache/hadoop/hbase/catalog/MetaEditor.java AL src/main/java/org/apache/hadoop/hbase/catalog/RootLocationEditor.java AL src/main/java/org/apache/hadoop/hbase/Stoppable.java AL src/main/java/org/apache/hadoop/hbase/HConstants.java AL src/main/java/org/apache/hadoop/hbase/HServerInfo.java AL src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java AL src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java 
AL src/main/java/org/apache/hadoop/hbase/executor/RegionTransitionData.java AL src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java AL src/main/java/org/apache/hadoop/hbase/HealthChecker.java AL src/main/java/org/apache/hadoop/hbase/ClockOutOfSyncException.java AL src/main/java/org/apache/hadoop/hbase/client/package-info.java AL src/main/java/org/apache/hadoop/hbase/client/MultiResponse.java AL src/main/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java AL src/main/java/org/apache/hadoop/hbase/client/HTableFactory.java AL src/main/java/org/apache/hadoop/hbase/client/Result.java AL src/main/java/org/apache/hadoop/hbase/client/RowMutations.java AL src/main/java/org/apache/hadoop/hbase/client/RegionOfflineException.java AL src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java AL src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java AL src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java AL src/main/java/org/apache/hadoop/hbase/client/IsolationLevel.java AL src/main/java/org/apache/hadoop/hbase/client/coprocessor/package-info.java AL src/main/java/org/apache/hadoop/hbase/client/coprocessor/Exec.java AL src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java AL src/main/java/org/apache/hadoop/hbase/client/coprocessor/ExecResult.java AL src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java AL src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java AL src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java AL src/main/java/org/apache/hadoop/hbase/client/Durability.java AL src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java AL src/main/java/org/apache/hadoop/hbase/client/HTableInterfaceFactory.java AL src/main/java/org/apache/hadoop/hbase/client/RowLock.java AL src/main/java/org/apache/hadoop/hbase/client/OperationWithAttributes.java AL 
src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java AL src/main/java/org/apache/hadoop/hbase/client/CoprocessorHConnection.java AL src/main/java/org/apache/hadoop/hbase/client/Append.java AL src/main/java/org/apache/hadoop/hbase/client/MultiPutResponse.java AL src/main/java/org/apache/hadoop/hbase/client/AbstractClientScanner.java AL src/main/java/org/apache/hadoop/hbase/client/Row.java AL src/main/java/org/apache/hadoop/hbase/client/Attributes.java AL src/main/java/org/apache/hadoop/hbase/client/Get.java AL src/main/java/org/apache/hadoop/hbase/client/Action.java AL src/main/java/org/apache/hadoop/hbase/client/Delete.java AL src/main/java/org/apache/hadoop/hbase/client/Put.java AL src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java AL src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java AL src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java AL src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java AL src/main/java/org/apache/hadoop/hbase/client/HTable.java AL src/main/java/org/apache/hadoop/hbase/client/HTablePool.java AL src/main/java/org/apache/hadoop/hbase/client/MultiPut.java AL src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java AL src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java AL src/main/java/org/apache/hadoop/hbase/client/Scan.java AL src/main/java/org/apache/hadoop/hbase/client/HConnection.java AL src/main/java/org/apache/hadoop/hbase/client/metrics/ScanMetrics.java AL src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java AL src/main/java/org/apache/hadoop/hbase/client/Increment.java AL src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java AL src/main/java/org/apache/hadoop/hbase/client/UserProvider.java AL src/main/java/org/apache/hadoop/hbase/client/MultiAction.java AL src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java AL 
src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java AL src/main/java/org/apache/hadoop/hbase/client/Mutation.java AL src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java AL src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java AL src/main/java/org/apache/hadoop/hbase/client/HTableUtil.java AL src/main/java/org/apache/hadoop/hbase/client/Operation.java AL src/site/resources/doap_Hbase.rdf B src/site/resources/images/architecture.gif !????? src/site/resources/images/hbase_logo.svg !????? src/site/resources/images/big_h_logo.svg B src/site/resources/images/hfile.png B src/site/resources/images/hadoop-logo.jpg B src/site/resources/images/favicon.ico B src/site/resources/images/replication_overview.png B src/site/resources/images/hfilev2.png B src/site/resources/images/big_h_logo.png B src/site/resources/images/hbase_logo.png AL src/site/resources/css/site.css !????? src/site/resources/css/freebsd_docbook.css AL src/site/site.vm AL src/site/site.xml AL src/site/xdoc/index.xml AL src/site/xdoc/resources.xml AL src/site/xdoc/old_news.xml AL src/site/xdoc/replication.xml AL src/site/xdoc/metrics.xml AL src/site/xdoc/acid-semantics.xml AL src/site/xdoc/cygwin.xml AL src/site/xdoc/pseudo-distributed.xml AL src/site/xdoc/sponsors.xml AL src/site/xdoc/bulk-loads.xml AL src/docbkx/book.xml AL src/docbkx/getting_started.xml AL src/docbkx/case_studies.xml AL src/docbkx/performance.xml AL src/docbkx/upgrading.xml AL src/docbkx/customization.xsl AL src/docbkx/community.xml AL src/docbkx/ops_mgt.xml AL src/docbkx/zookeeper.xml AL src/docbkx/developer.xml AL src/docbkx/troubleshooting.xml AL src/docbkx/shell.xml AL src/docbkx/configuration.xml AL src/docbkx/security.xml AL src/docbkx/preface.xml AL src/docbkx/external_apis.xml AL security/src/test/resources/hbase-site.xml AL security/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java AL 
security/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java AL security/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionsWatcher.java AL security/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java AL security/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java AL security/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcher.java AL security/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java AL security/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFilesSplitRecovery.java AL security/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFiles.java AL security/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java AL security/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java AL security/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java AL security/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java AL security/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java AL security/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java AL security/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java AL security/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java AL security/src/main/java/org/apache/hadoop/hbase/security/access/AccessControllerProtocol.java AL security/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java AL security/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadProtocol.java AL security/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java AL security/src/main/java/org/apache/hadoop/hbase/security/AccessDeniedException.java AL 
security/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationKey.java AL security/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java AL security/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationProtocol.java AL security/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java AL security/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSelector.java AL security/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java AL security/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java AL security/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java AL security/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java AL security/src/main/java/org/apache/hadoop/hbase/security/HBasePolicyProvider.java AL security/src/main/java/org/apache/hadoop/hbase/ipc/SecureClient.java AL security/src/main/java/org/apache/hadoop/hbase/ipc/SecureRpcEngine.java AL security/src/main/java/org/apache/hadoop/hbase/ipc/SecureConnectionHeader.java AL security/src/main/java/org/apache/hadoop/hbase/ipc/SecureServer.java !????? .arcconfig !????? .git/FETCH_HEAD !????? .git/description !????? .git/info/exclude !????? .git/gitk.cache !????? .git/COMMIT_EDITMSG !????? .git/ORIG_HEAD !????? .git/hooks/pre-push.sample !????? .git/hooks/pre-commit.sample !????? .git/hooks/update.sample !????? .git/hooks/applypatch-msg.sample !????? .git/hooks/pre-rebase.sample !????? .git/hooks/commit-msg.sample !????? .git/hooks/post-update.sample !????? .git/hooks/prepare-commit-msg.sample !????? .git/hooks/pre-applypatch.sample !????? .git/refs/tags/0.98.3RC0 !????? .git/refs/tags/0.94.20RC0 !????? .git/refs/heads/0.94 !????? .git/refs/remotes/origin/master !????? .git/refs/remotes/origin/0.96 !????? .git/refs/remotes/origin/0.94 !????? .git/refs/remotes/origin/hbase-10070 !????? .git/refs/remotes/origin/HEAD !????? 
.git/refs/remotes/origin/0.98 B .git/index !????? .git/logs/refs/heads/0.94 !????? .git/logs/refs/remotes/origin/master !????? .git/logs/refs/remotes/origin/0.96 !????? .git/logs/refs/remotes/origin/0.94 !????? .git/logs/refs/remotes/origin/hbase-10070 !????? .git/logs/refs/remotes/origin/HEAD !????? .git/logs/refs/remotes/origin/0.98 !????? .git/logs/HEAD !????? .git/HEAD !????? .git/packed-refs B .git/objects/4f/93d89c17d239668ac624ac221111d00a8f0474 B .git/objects/35/408b5d2a1ec8f50471d6899e366801b1cd2992 B .git/objects/2b/3f8b6ac16043072cae04c129e9d58fad3a4c32 B .git/objects/a6/7beee1718429d61f68000920b535e46dcd2d53 B .git/objects/47/fc8dd3fd6ec1cc8dd836d8abb0a8b906485922 B .git/objects/92/64a40cf894918a7a9c3b3d5a907f8feb7285a0 B .git/objects/20/ce4e5f117411f3eb2a444ace12754e6072eaf2 B .git/objects/2c/483a0a5c60b7c3c649b1458db37ce3e40687dc B .git/objects/db/401458b5fdf1d38f1771825922eb7bec17826b B .git/objects/54/98b638086d84c00926683e1763da11065474d1 B .git/objects/51/571e66b7c7439cb1b388d0aeac75b58d1c818b B .git/objects/pack/pack-b5a936381c06740222420bde0bbcdc9d77518885.pack B .git/objects/pack/pack-b70c6434e10d8faff183d58a107984a23d12b374.idx B .git/objects/pack/pack-794eca8e4eed5e6eb911276d48c5c4b1b6e0fd14.idx B .git/objects/pack/pack-794eca8e4eed5e6eb911276d48c5c4b1b6e0fd14.pack B .git/objects/pack/pack-b70c6434e10d8faff183d58a107984a23d12b374.pack B .git/objects/pack/pack-b5a936381c06740222420bde0bbcdc9d77518885.idx B .git/objects/ad/0abf22749f513a19a12622cdfe1f33cab5d294 B .git/objects/09/c60d770f2869ca315910ba0f9a5ee9797b1edc B .git/objects/7e/f02c0d5d4f7308bb0d05a3f8a61c34563ec325 B .git/objects/b5/75fb0297590250cd7396f2d7be0d61f9bb8927 B .git/objects/7a/5bdf42b54d947ffc23432f09fccd9cf31eb8f5 B .git/objects/ed/6c3054f880f644eff4fc175a091980f51a8384 B .git/objects/d0/ed9b4f68b969849b712e97bcc7a4cee60050c0 B .git/objects/a4/72038c0707ae66fd6acbb09d3ca1819f5078bc B .git/objects/11/ee6177a984fffe592525fe8596c50c5eb4085f !????? 
.git/config AL bin/zookeepers.sh AL bin/graceful_stop.sh AL bin/hbase-daemons.sh AL bin/local-regionservers.sh AL bin/hirb.rb AL bin/master-backup.sh AL bin/hbase-jruby AL bin/rolling-restart.sh AL bin/stop-hbase.sh AL bin/region_status.rb AL bin/hbase AL bin/local-master-backup.sh AL bin/replication/copy_tables_desc.rb AL bin/get-active-master.rb AL bin/start-hbase.sh AL bin/hbase-daemon.sh AL bin/hbase-config.sh AL bin/region_mover.rb AL bin/regionservers.sh !????? .gitignore !????? CHANGES.txt AL pom.xml AL dev-support/test-patch.properties AL dev-support/jdiffHBasePublicAPI_common.sh AL dev-support/hbase_jdiff_template.xml AL dev-support/test-util.sh AL dev-support/test-patch.sh AL dev-support/jdiffHBasePublicAPI.sh AL dev-support/smart-apply-patch.sh AL dev-support/hbase_jdiff_acrossSingularityTemplate.xml AL dev-support/hbasetests.sh AL dev-support/hbase_jdiff_afterSingularityTemplate.xml AL dev-support/findHangingTest.sh N LICENSE.txt !????? conf/regionservers !????? conf/log4j.properties AL conf/hbase-env.sh AL conf/hbase-site.xml AL conf/hbase-policy.xml !????? conf/hadoop-metrics.properties ***************************************************** Printing headers for files without AL header... 
======================================================================= ==src/packages/deb/hbase.control/conffile ======================================================================= /etc/hbase/hadoop-metrics.properties /etc/hbase/hbase-env.sh /etc/hbase/hbase-site.xml /etc/hbase/log4j.properties /etc/hbase/regionservers ======================================================================= ==src/main/avro/hbase.avpr ======================================================================= { "protocol" : "HBase", "namespace" : "org.apache.hadoop.hbase.avro.generated", "types" : [ { "type" : "record", "name" : "AServerAddress", "fields" : [ { "name" : "hostname", "type" : "string" }, { "name" : "inetSocketAddress", "type" : "string" }, { "name" : "port", "type" : "int" } ] }, { "type" : "record", "name" : "ARegionLoad", "fields" : [ { "name" : "memStoreSizeMB", "type" : "int" }, { "name" : "name", "type" : "bytes" }, { "name" : "storefileIndexSizeMB", "type" : "int" }, { "name" : "storefiles", "type" : "int" }, { "name" : "storefileSizeMB", "type" : "int" }, { "name" : "stores", "type" : "int" } ] }, { "type" : "record", "name" : "AServerLoad", "fields" : [ { "name" : "load", "type" : "int" }, { "name" : "maxHeapMB", "type" : "int" }, { "name" : "memStoreSizeInMB", "type" : "int" ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnIncrement.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift2.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import 
java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Represents a single cell and the amount to increment it by */ public class TColumnIncrement implements org.apache.thrift.TBase<TColumnIncrement, TColumnIncrement._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumnIncrement"); private static final org.apache.thrift.protocol.TField FAMILY_FIELD_DESC = new org.apache.thrift.protocol.TField("family", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField QUALIFIER_FIELD_DESC = new org.apache.thrift.protocol.TField("qualifier", org.apache.thrift.protocol.TType.STRING, (short)2); private static final org.apache.thrift.protocol.TField AMOUNT_FIELD_DESC = new org.apache.thrift.protocol.TField("amount", org.apache.thrift.protocol.TType.I64, (short)3); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new TColumnIncrementStandardSchemeFactory()); schemes.put(TupleScheme.class, new TColumnIncrementTupleSchemeFactory()); } public ByteBuffer family; // required public ByteBuffer qualifier; // required public long amount; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. 
*/ ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift2.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class THBaseService { public interface Iface { /** * Test for the existence of columns in the table, as specified in the TGet. * * @return true if the specified TGet matches one or more keys, false if not * * @param table the table to check on * * @param get the TGet to check for */ public boolean exists(ByteBuffer table, TGet get) throws TIOError, org.apache.thrift.TException; /** * Method for getting data from a row. * * If the row cannot be found an empty Result is returned. 
* This can be checked by the empty field of the TResult * ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumn.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift2.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Addresses a single cell or multiple cells * in a HBase table by column family and optionally * a column qualifier and timestamp */ public class TColumn implements org.apache.thrift.TBase<TColumn, TColumn._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumn"); private static final org.apache.thrift.protocol.TField FAMILY_FIELD_DESC = new org.apache.thrift.protocol.TField("family", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField QUALIFIER_FIELD_DESC = new org.apache.thrift.protocol.TField("qualifier", org.apache.thrift.protocol.TType.STRING, (short)2); private static final org.apache.thrift.protocol.TField TIMESTAMP_FIELD_DESC = new org.apache.thrift.protocol.TField("timestamp", org.apache.thrift.protocol.TType.I64, (short)3); private static final Map<Class<? 
extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new TColumnStandardSchemeFactory()); schemes.put(TupleScheme.class, new TColumnTupleSchemeFactory()); } public ByteBuffer family; // required public ByteBuffer qualifier; // optional public long timestamp; // optional ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TTimeRange.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift2.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class TTimeRange implements org.apache.thrift.TBase<TTimeRange, TTimeRange._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TTimeRange"); private static final org.apache.thrift.protocol.TField MIN_STAMP_FIELD_DESC = new org.apache.thrift.protocol.TField("minStamp", org.apache.thrift.protocol.TType.I64, (short)1); private static final org.apache.thrift.protocol.TField MAX_STAMP_FIELD_DESC = new org.apache.thrift.protocol.TField("maxStamp", org.apache.thrift.protocol.TType.I64, (short)2); private static final Map<Class<? 
extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new TTimeRangeStandardSchemeFactory()); schemes.put(TupleScheme.class, new TTimeRangeTupleSchemeFactory()); } public long minStamp; // required public long maxStamp; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { MIN_STAMP((short)1, "minStamp"), MAX_STAMP((short)2, "maxStamp"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TMutation.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift2.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Atomic mutation for the specified row. It can be either Put or Delete. 
*/ public class TMutation extends org.apache.thrift.TUnion<TMutation, TMutation._Fields> { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TMutation"); private static final org.apache.thrift.protocol.TField PUT_FIELD_DESC = new org.apache.thrift.protocol.TField("put", org.apache.thrift.protocol.TType.STRUCT, (short)1); private static final org.apache.thrift.protocol.TField DELETE_SINGLE_FIELD_DESC = new org.apache.thrift.protocol.TField("deleteSingle", org.apache.thrift.protocol.TType.STRUCT, (short)2); /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { PUT((short)1, "put"), DELETE_SINGLE((short)2, "deleteSingle"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIOError.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift2.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** 
* A TIOError exception signals that an error occurred communicating * to the HBase master or a HBase region server. Also used to return * more general HBase error conditions. */ public class TIOError extends Exception implements org.apache.thrift.TBase<TIOError, TIOError._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TIOError"); private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC = new org.apache.thrift.protocol.TField("message", org.apache.thrift.protocol.TType.STRING, (short)1); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new TIOErrorStandardSchemeFactory()); schemes.put(TupleScheme.class, new TIOErrorTupleSchemeFactory()); } public String message; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. 
*/ public enum _Fields implements org.apache.thrift.TFieldIdEnum { MESSAGE((short)1, "message"); ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift2.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Any timestamps in the columns are ignored, use timeRange to select by timestamp. * Max versions defaults to 1. 
*/ public class TScan implements org.apache.thrift.TBase<TScan, TScan._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TScan"); private static final org.apache.thrift.protocol.TField START_ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("startRow", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField STOP_ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("stopRow", org.apache.thrift.protocol.TType.STRING, (short)2); private static final org.apache.thrift.protocol.TField COLUMNS_FIELD_DESC = new org.apache.thrift.protocol.TField("columns", org.apache.thrift.protocol.TType.LIST, (short)3); private static final org.apache.thrift.protocol.TField CACHING_FIELD_DESC = new org.apache.thrift.protocol.TField("caching", org.apache.thrift.protocol.TType.I32, (short)4); private static final org.apache.thrift.protocol.TField MAX_VERSIONS_FIELD_DESC = new org.apache.thrift.protocol.TField("maxVersions", org.apache.thrift.protocol.TType.I32, (short)5); private static final org.apache.thrift.protocol.TField TIME_RANGE_FIELD_DESC = new org.apache.thrift.protocol.TField("timeRange", org.apache.thrift.protocol.TType.STRUCT, (short)6); private static final org.apache.thrift.protocol.TField FILTER_STRING_FIELD_DESC = new org.apache.thrift.protocol.TField("filterString", org.apache.thrift.protocol.TType.STRING, (short)7); private static final org.apache.thrift.protocol.TField BATCH_SIZE_FIELD_DESC = new org.apache.thrift.protocol.TField("batchSize", org.apache.thrift.protocol.TType.I32, (short)8); private static final org.apache.thrift.protocol.TField ATTRIBUTES_FIELD_DESC = new org.apache.thrift.protocol.TField("attributes", org.apache.thrift.protocol.TType.MAP, (short)9); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? 
extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new TScanStandardSchemeFactory()); schemes.put(TupleScheme.class, new TScanTupleSchemeFactory()); ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TPut.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift2.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Used to perform Put operations for a single row. * * Add column values to this object and they'll be added. * You can provide a default timestamp if the column values * don't have one. If you don't provide a default timestamp * the current time is inserted. * * You can also specify if this Put should be written * to the write-ahead Log (WAL) or not. It defaults to true. 
*/ public class TPut implements org.apache.thrift.TBase<TPut, TPut._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TPut"); private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField COLUMN_VALUES_FIELD_DESC = new org.apache.thrift.protocol.TField("columnValues", org.apache.thrift.protocol.TType.LIST, (short)2); private static final org.apache.thrift.protocol.TField TIMESTAMP_FIELD_DESC = new org.apache.thrift.protocol.TField("timestamp", org.apache.thrift.protocol.TType.I64, (short)3); private static final org.apache.thrift.protocol.TField WRITE_TO_WAL_FIELD_DESC = new org.apache.thrift.protocol.TField("writeToWal", org.apache.thrift.protocol.TType.BOOL, (short)4); private static final org.apache.thrift.protocol.TField ATTRIBUTES_FIELD_DESC = new org.apache.thrift.protocol.TField("attributes", org.apache.thrift.protocol.TType.MAP, (short)5); private static final org.apache.thrift.protocol.TField DURABILITY_FIELD_DESC = new org.apache.thrift.protocol.TField("durability", org.apache.thrift.protocol.TType.I32, (short)6); ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDurability.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift2.generated; import java.util.Map; import java.util.HashMap; import org.apache.thrift.TEnum; /** * Specify Durability: * - SKIP_WAL means do not write the Mutation to the WAL. 
* - ASYNC_WAL means write the Mutation to the WAL asynchronously, * - SYNC_WAL means write the Mutation to the WAL synchronously, * - FSYNC_WAL means Write the Mutation to the WAL synchronously and force the entries to disk. */ public enum TDurability implements org.apache.thrift.TEnum { SKIP_WAL(1), ASYNC_WAL(2), SYNC_WAL(3), FSYNC_WAL(4); private final int value; private TDurability(int value) { this.value = value; } /** * Get the integer value of this enum value, as defined in the Thrift IDL. */ public int getValue() { return value; } /** * Find a the enum type by its integer value, as defined in the Thrift IDL. * @return null if the value is not found. */ public static TDurability findByValue(int value) { switch (value) { case 1: return SKIP_WAL; case 2: return ASYNC_WAL; case 3: ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift2.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Used to perform Get operations on a single row. * * The scope can be further narrowed down by specifying a list of * columns or column families. 
* * To get everything for a row, instantiate a Get object with just the row to get. * To further define the scope of what to get you can add a timestamp or time range * with an optional maximum number of versions to return. * * If you specify a time range and a timestamp the range is ignored. * Timestamps on TColumns are ignored. */ public class TGet implements org.apache.thrift.TBase<TGet, TGet._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGet"); private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField COLUMNS_FIELD_DESC = new org.apache.thrift.protocol.TField("columns", org.apache.thrift.protocol.TType.LIST, (short)2); private static final org.apache.thrift.protocol.TField TIMESTAMP_FIELD_DESC = new org.apache.thrift.protocol.TField("timestamp", org.apache.thrift.protocol.TType.I64, (short)3); private static final org.apache.thrift.protocol.TField TIME_RANGE_FIELD_DESC = new org.apache.thrift.protocol.TField("timeRange", org.apache.thrift.protocol.TType.STRUCT, (short)4); private static final org.apache.thrift.protocol.TField MAX_VERSIONS_FIELD_DESC = new org.apache.thrift.protocol.TField("maxVersions", org.apache.thrift.protocol.TType.I32, (short)5); ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TRowMutations.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift2.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import 
org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A TRowMutations object is used to apply a number of Mutations to a single row. */ public class TRowMutations implements org.apache.thrift.TBase<TRowMutations, TRowMutations._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TRowMutations"); private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField MUTATIONS_FIELD_DESC = new org.apache.thrift.protocol.TField("mutations", org.apache.thrift.protocol.TType.LIST, (short)2); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new TRowMutationsStandardSchemeFactory()); schemes.put(TupleScheme.class, new TRowMutationsTupleSchemeFactory()); } public ByteBuffer row; // required public List<TMutation> mutations; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. 
*/ public enum _Fields implements org.apache.thrift.TFieldIdEnum { ROW((short)1, "row"), ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIllegalArgument.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift2.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A TIllegalArgument exception indicates an illegal or invalid * argument was passed into a procedure. */ public class TIllegalArgument extends Exception implements org.apache.thrift.TBase<TIllegalArgument, TIllegalArgument._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TIllegalArgument"); private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC = new org.apache.thrift.protocol.TField("message", org.apache.thrift.protocol.TType.STRING, (short)1); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? 
extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new TIllegalArgumentStandardSchemeFactory()); schemes.put(TupleScheme.class, new TIllegalArgumentTupleSchemeFactory()); } public String message; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { MESSAGE((short)1, "message"); ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIncrement.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift2.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Used to perform Increment operations for a single row. * * You can specify if this Increment should be written * to the write-ahead Log (WAL) or not. It defaults to true. 
*/ public class TIncrement implements org.apache.thrift.TBase<TIncrement, TIncrement._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TIncrement"); private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField COLUMNS_FIELD_DESC = new org.apache.thrift.protocol.TField("columns", org.apache.thrift.protocol.TType.LIST, (short)2); private static final org.apache.thrift.protocol.TField WRITE_TO_WAL_FIELD_DESC = new org.apache.thrift.protocol.TField("writeToWal", org.apache.thrift.protocol.TType.BOOL, (short)3); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new TIncrementStandardSchemeFactory()); schemes.put(TupleScheme.class, new TIncrementTupleSchemeFactory()); } public ByteBuffer row; // required public List<TColumnIncrement> columns; // required ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TResult.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift2.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import 
java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * if no Result is found, row and columnValues will not be set. */ public class TResult implements org.apache.thrift.TBase<TResult, TResult._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TResult"); private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField COLUMN_VALUES_FIELD_DESC = new org.apache.thrift.protocol.TField("columnValues", org.apache.thrift.protocol.TType.LIST, (short)2); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new TResultStandardSchemeFactory()); schemes.put(TupleScheme.class, new TResultTupleSchemeFactory()); } public ByteBuffer row; // optional public List<TColumnValue> columnValues; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. 
*/ public enum _Fields implements org.apache.thrift.TFieldIdEnum { ROW((short)1, "row"), ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDelete.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift2.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Used to perform Delete operations on a single row. * * The scope can be further narrowed down by specifying a list of * columns or column families as TColumns. * * Specifying only a family in a TColumn will delete the whole family. * If a timestamp is specified all versions with a timestamp less than * or equal to this will be deleted. If no timestamp is specified the * current time will be used. * * Specifying a family and a column qualifier in a TColumn will delete only * this qualifier. If a timestamp is specified only versions equal * to this timestamp will be deleted. If no timestamp is specified the * most recent version will be deleted. To delete all previous versions, * specify the DELETE_COLUMNS TDeleteType. * * The top level timestamp is only used if a complete row should be deleted * (i.e. 
no columns are passed) and if it is specified it works the same way * as if you had added a TColumn for every column family and this timestamp * (i.e. all versions older than or equal in all column families will be deleted) ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDeleteType.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift2.generated; import java.util.Map; import java.util.HashMap; import org.apache.thrift.TEnum; /** * Specify type of delete: * - DELETE_COLUMN means exactly one version will be removed, * - DELETE_COLUMNS means previous versions will also be removed. */ public enum TDeleteType implements org.apache.thrift.TEnum { DELETE_COLUMN(0), DELETE_COLUMNS(1); private final int value; private TDeleteType(int value) { this.value = value; } /** * Get the integer value of this enum value, as defined in the Thrift IDL. */ public int getValue() { return value; } /** * Find a the enum type by its integer value, as defined in the Thrift IDL. * @return null if the value is not found. 
*/ public static TDeleteType findByValue(int value) { switch (value) { case 0: return DELETE_COLUMN; case 1: return DELETE_COLUMNS; default: return null; } } } ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnValue.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift2.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Represents a single cell and its value. 
*/ public class TColumnValue implements org.apache.thrift.TBase<TColumnValue, TColumnValue._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumnValue"); private static final org.apache.thrift.protocol.TField FAMILY_FIELD_DESC = new org.apache.thrift.protocol.TField("family", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField QUALIFIER_FIELD_DESC = new org.apache.thrift.protocol.TField("qualifier", org.apache.thrift.protocol.TType.STRING, (short)2); private static final org.apache.thrift.protocol.TField VALUE_FIELD_DESC = new org.apache.thrift.protocol.TField("value", org.apache.thrift.protocol.TType.STRING, (short)3); private static final org.apache.thrift.protocol.TField TIMESTAMP_FIELD_DESC = new org.apache.thrift.protocol.TField("timestamp", org.apache.thrift.protocol.TType.I64, (short)4); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new TColumnValueStandardSchemeFactory()); schemes.put(TupleScheme.class, new TColumnValueTupleSchemeFactory()); } public ByteBuffer family; // required public ByteBuffer qualifier; // required public ByteBuffer value; // required public long timestamp; // optional ======================================================================= ==src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/StorageClusterStatusMessage.java ======================================================================= // Generated by the protocol buffer compiler. DO NOT EDIT! 
// source: StorageClusterStatusMessage.proto package org.apache.hadoop.hbase.rest.protobuf.generated; public final class StorageClusterStatusMessage { private StorageClusterStatusMessage() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface StorageClusterStatusOrBuilder extends com.google.protobuf.MessageOrBuilder { // repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Node liveNodes = 1; java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node> getLiveNodesList(); org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node getLiveNodes(int index); int getLiveNodesCount(); java.util.List<? extends org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.NodeOrBuilder> getLiveNodesOrBuilderList(); org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.NodeOrBuilder getLiveNodesOrBuilder( int index); // repeated string deadNodes = 2; java.util.List<String> getDeadNodesList(); int getDeadNodesCount(); String getDeadNodes(int index); // optional int32 regions = 3; boolean hasRegions(); int getRegions(); // optional int32 requests = 4; boolean hasRequests(); int getRequests(); // optional double averageLoad = 5; boolean hasAverageLoad(); double getAverageLoad(); } public static final class StorageClusterStatus extends com.google.protobuf.GeneratedMessage implements StorageClusterStatusOrBuilder { // Use StorageClusterStatus.newBuilder() to construct. 
private StorageClusterStatus(Builder builder) { super(builder); } private StorageClusterStatus(boolean noInit) {} private static final StorageClusterStatus defaultInstance; ======================================================================= ==src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellMessage.java ======================================================================= // Generated by the protocol buffer compiler. DO NOT EDIT! // source: CellMessage.proto package org.apache.hadoop.hbase.rest.protobuf.generated; public final class CellMessage { private CellMessage() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface CellOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional bytes row = 1; boolean hasRow(); com.google.protobuf.ByteString getRow(); // optional bytes column = 2; boolean hasColumn(); com.google.protobuf.ByteString getColumn(); // optional int64 timestamp = 3; boolean hasTimestamp(); long getTimestamp(); // optional bytes data = 4; boolean hasData(); com.google.protobuf.ByteString getData(); } public static final class Cell extends com.google.protobuf.GeneratedMessage implements CellOrBuilder { // Use Cell.newBuilder() to construct. 
private Cell(Builder builder) { super(builder); } private Cell(boolean noInit) {} private static final Cell defaultInstance; public static Cell getDefaultInstance() { return defaultInstance; } public Cell getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor; ======================================================================= ==src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/VersionMessage.java ======================================================================= // Generated by the protocol buffer compiler. DO NOT EDIT! // source: VersionMessage.proto package org.apache.hadoop.hbase.rest.protobuf.generated; public final class VersionMessage { private VersionMessage() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface VersionOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional string restVersion = 1; boolean hasRestVersion(); String getRestVersion(); // optional string jvmVersion = 2; boolean hasJvmVersion(); String getJvmVersion(); // optional string osVersion = 3; boolean hasOsVersion(); String getOsVersion(); // optional string serverVersion = 4; boolean hasServerVersion(); String getServerVersion(); // optional string jerseyVersion = 5; boolean hasJerseyVersion(); String getJerseyVersion(); } public static final class Version extends com.google.protobuf.GeneratedMessage implements VersionOrBuilder { // Use Version.newBuilder() to construct. 
private Version(Builder builder) { super(builder); } private Version(boolean noInit) {} private static final Version defaultInstance; public static Version getDefaultInstance() { return defaultInstance; } public Version getDefaultInstanceForType() { return defaultInstance; } ======================================================================= ==src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ColumnSchemaMessage.java ======================================================================= // Generated by the protocol buffer compiler. DO NOT EDIT! // source: ColumnSchemaMessage.proto package org.apache.hadoop.hbase.rest.protobuf.generated; public final class ColumnSchemaMessage { private ColumnSchemaMessage() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface ColumnSchemaOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional string name = 1; boolean hasName(); String getName(); // repeated .org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchema.Attribute attrs = 2; java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute> getAttrsList(); org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute getAttrs(int index); int getAttrsCount(); java.util.List<? 
extends org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.AttributeOrBuilder> getAttrsOrBuilderList(); org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.AttributeOrBuilder getAttrsOrBuilder( int index); // optional int32 ttl = 3; boolean hasTtl(); int getTtl(); // optional int32 maxVersions = 4; boolean hasMaxVersions(); int getMaxVersions(); // optional string compression = 5; boolean hasCompression(); String getCompression(); } public static final class ColumnSchema extends com.google.protobuf.GeneratedMessage implements ColumnSchemaOrBuilder { // Use ColumnSchema.newBuilder() to construct. private ColumnSchema(Builder builder) { super(builder); } private ColumnSchema(boolean noInit) {} private static final ColumnSchema defaultInstance; public static ColumnSchema getDefaultInstance() { ======================================================================= ==src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableListMessage.java ======================================================================= // Generated by the protocol buffer compiler. DO NOT EDIT! // source: TableListMessage.proto package org.apache.hadoop.hbase.rest.protobuf.generated; public final class TableListMessage { private TableListMessage() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface TableListOrBuilder extends com.google.protobuf.MessageOrBuilder { // repeated string name = 1; java.util.List<String> getNameList(); int getNameCount(); String getName(int index); } public static final class TableList extends com.google.protobuf.GeneratedMessage implements TableListOrBuilder { // Use TableList.newBuilder() to construct. 
private TableList(Builder builder) { super(builder); } private TableList(boolean noInit) {} private static final TableList defaultInstance; public static TableList getDefaultInstance() { return defaultInstance; } public TableList getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable; } // repeated string name = 1; public static final int NAME_FIELD_NUMBER = 1; private com.google.protobuf.LazyStringList name_; public java.util.List<String> ======================================================================= ==src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableInfoMessage.java ======================================================================= // Generated by the protocol buffer compiler. DO NOT EDIT! // source: TableInfoMessage.proto package org.apache.hadoop.hbase.rest.protobuf.generated; public final class TableInfoMessage { private TableInfoMessage() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface TableInfoOrBuilder extends com.google.protobuf.MessageOrBuilder { // required string name = 1; boolean hasName(); String getName(); // repeated .org.apache.hadoop.hbase.rest.protobuf.generated.TableInfo.Region regions = 2; java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region> getRegionsList(); org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region getRegions(int index); int getRegionsCount(); java.util.List<? 
extends org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.RegionOrBuilder> getRegionsOrBuilderList(); org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.RegionOrBuilder getRegionsOrBuilder( int index); } public static final class TableInfo extends com.google.protobuf.GeneratedMessage implements TableInfoOrBuilder { // Use TableInfo.newBuilder() to construct. private TableInfo(Builder builder) { super(builder); } private TableInfo(boolean noInit) {} private static final TableInfo defaultInstance; public static TableInfo getDefaultInstance() { return defaultInstance; } public TableInfo getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_descriptor; } ======================================================================= ==src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableSchemaMessage.java ======================================================================= // Generated by the protocol buffer compiler. DO NOT EDIT! 
// source: TableSchemaMessage.proto package org.apache.hadoop.hbase.rest.protobuf.generated; public final class TableSchemaMessage { private TableSchemaMessage() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface TableSchemaOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional string name = 1; boolean hasName(); String getName(); // repeated .org.apache.hadoop.hbase.rest.protobuf.generated.TableSchema.Attribute attrs = 2; java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute> getAttrsList(); org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute getAttrs(int index); int getAttrsCount(); java.util.List<? extends org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.AttributeOrBuilder> getAttrsOrBuilderList(); org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.AttributeOrBuilder getAttrsOrBuilder( int index); // repeated .org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchema columns = 3; java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema> getColumnsList(); org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema getColumns(int index); int getColumnsCount(); java.util.List<? extends org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchemaOrBuilder> getColumnsOrBuilderList(); org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchemaOrBuilder getColumnsOrBuilder( int index); // optional bool inMemory = 4; boolean hasInMemory(); boolean getInMemory(); // optional bool readOnly = 5; boolean hasReadOnly(); boolean getReadOnly(); } public static final class TableSchema extends com.google.protobuf.GeneratedMessage implements TableSchemaOrBuilder { // Use TableSchema.newBuilder() to construct. 
private TableSchema(Builder builder) { ======================================================================= ==src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellSetMessage.java ======================================================================= // Generated by the protocol buffer compiler. DO NOT EDIT! // source: CellSetMessage.proto package org.apache.hadoop.hbase.rest.protobuf.generated; public final class CellSetMessage { private CellSetMessage() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface CellSetOrBuilder extends com.google.protobuf.MessageOrBuilder { // repeated .org.apache.hadoop.hbase.rest.protobuf.generated.CellSet.Row rows = 1; java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row> getRowsList(); org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row getRows(int index); int getRowsCount(); java.util.List<? extends org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.RowOrBuilder> getRowsOrBuilderList(); org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.RowOrBuilder getRowsOrBuilder( int index); } public static final class CellSet extends com.google.protobuf.GeneratedMessage implements CellSetOrBuilder { // Use CellSet.newBuilder() to construct. 
private CellSet(Builder builder) { super(builder); } private CellSet(boolean noInit) {} private static final CellSet defaultInstance; public static CellSet getDefaultInstance() { return defaultInstance; } public CellSet getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_fieldAccessorTable; } ======================================================================= ==src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java ======================================================================= // Generated by the protocol buffer compiler. DO NOT EDIT! 
// source: ScannerMessage.proto package org.apache.hadoop.hbase.rest.protobuf.generated; public final class ScannerMessage { private ScannerMessage() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface ScannerOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional bytes startRow = 1; boolean hasStartRow(); com.google.protobuf.ByteString getStartRow(); // optional bytes endRow = 2; boolean hasEndRow(); com.google.protobuf.ByteString getEndRow(); // repeated bytes columns = 3; java.util.List<com.google.protobuf.ByteString> getColumnsList(); int getColumnsCount(); com.google.protobuf.ByteString getColumns(int index); // optional int32 batch = 4; boolean hasBatch(); int getBatch(); // optional int64 startTime = 5; boolean hasStartTime(); long getStartTime(); // optional int64 endTime = 6; boolean hasEndTime(); long getEndTime(); // optional int32 maxVersions = 7; boolean hasMaxVersions(); int getMaxVersions(); // optional string filter = 8; boolean hasFilter(); String getFilter(); } public static final class Scanner extends com.google.protobuf.GeneratedMessage implements ScannerOrBuilder { // Use Scanner.newBuilder() to construct. ======================================================================= ==src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java ======================================================================= // Generated by the protocol buffer compiler. DO NOT EDIT! 
// source: hbase.proto package org.apache.hadoop.hbase.protobuf.generated; public final class HBaseProtos { private HBaseProtos() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface SnapshotDescriptionOrBuilder extends com.google.protobuf.MessageOrBuilder { // required string name = 1; boolean hasName(); String getName(); // optional string table = 2; boolean hasTable(); String getTable(); // optional int64 creationTime = 3 [default = 0]; boolean hasCreationTime(); long getCreationTime(); // optional .SnapshotDescription.Type type = 4 [default = FLUSH]; boolean hasType(); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType(); // optional int32 version = 5; boolean hasVersion(); int getVersion(); } public static final class SnapshotDescription extends com.google.protobuf.GeneratedMessage implements SnapshotDescriptionOrBuilder { // Use SnapshotDescription.newBuilder() to construct. private SnapshotDescription(Builder builder) { super(builder); } private SnapshotDescription(boolean noInit) {} private static final SnapshotDescription defaultInstance; public static SnapshotDescription getDefaultInstance() { return defaultInstance; } public SnapshotDescription getDefaultInstanceForType() { return defaultInstance; } ======================================================================= ==src/main/java/org/apache/hadoop/hbase/protobuf/generated/ErrorHandlingProtos.java ======================================================================= // Generated by the protocol buffer compiler. DO NOT EDIT! 
// source: ErrorHandling.proto package org.apache.hadoop.hbase.protobuf.generated; public final class ErrorHandlingProtos { private ErrorHandlingProtos() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface StackTraceElementMessageOrBuilder extends com.google.protobuf.MessageOrBuilder { // optional string declaringClass = 1; boolean hasDeclaringClass(); String getDeclaringClass(); // optional string methodName = 2; boolean hasMethodName(); String getMethodName(); // optional string fileName = 3; boolean hasFileName(); String getFileName(); // optional int32 lineNumber = 4; boolean hasLineNumber(); int getLineNumber(); } public static final class StackTraceElementMessage extends com.google.protobuf.GeneratedMessage implements StackTraceElementMessageOrBuilder { // Use StackTraceElementMessage.newBuilder() to construct. private StackTraceElementMessage(Builder builder) { super(builder); } private StackTraceElementMessage(boolean noInit) {} private static final StackTraceElementMessage defaultInstance; public static StackTraceElementMessage getDefaultInstance() { return defaultInstance; } public StackTraceElementMessage getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_StackTraceElementMessage_descriptor; ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift/generated/TColumn.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import 
org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Holds column name and the cell. */ public class TColumn implements org.apache.thrift.TBase<TColumn, TColumn._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumn"); private static final org.apache.thrift.protocol.TField COLUMN_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("columnName", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField CELL_FIELD_DESC = new org.apache.thrift.protocol.TField("cell", org.apache.thrift.protocol.TType.STRUCT, (short)2); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new TColumnStandardSchemeFactory()); schemes.put(TupleScheme.class, new TColumnTupleSchemeFactory()); } public ByteBuffer columnName; // required public TCell cell; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. 
*/ public enum _Fields implements org.apache.thrift.TFieldIdEnum { COLUMN_NAME((short)1, "columnName"), ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class Hbase { public interface Iface { /** * Brings a table on-line (enables it) * * @param tableName name of the table */ public void enableTable(ByteBuffer tableName) throws IOError, org.apache.thrift.TException; /** * Disables a table (takes it off-line) If it is being served, the master * will tell the servers to stop serving it. 
* * @param tableName name of the table */ public void disableTable(ByteBuffer tableName) throws IOError, org.apache.thrift.TException; /** * @return true if table is on-line ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift/generated/TScan.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A Scan object is used to specify scanner parameters when opening a scanner. 
*/ public class TScan implements org.apache.thrift.TBase<TScan, TScan._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TScan"); private static final org.apache.thrift.protocol.TField START_ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("startRow", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField STOP_ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("stopRow", org.apache.thrift.protocol.TType.STRING, (short)2); private static final org.apache.thrift.protocol.TField TIMESTAMP_FIELD_DESC = new org.apache.thrift.protocol.TField("timestamp", org.apache.thrift.protocol.TType.I64, (short)3); private static final org.apache.thrift.protocol.TField COLUMNS_FIELD_DESC = new org.apache.thrift.protocol.TField("columns", org.apache.thrift.protocol.TType.LIST, (short)4); private static final org.apache.thrift.protocol.TField CACHING_FIELD_DESC = new org.apache.thrift.protocol.TField("caching", org.apache.thrift.protocol.TType.I32, (short)5); private static final org.apache.thrift.protocol.TField FILTER_STRING_FIELD_DESC = new org.apache.thrift.protocol.TField("filterString", org.apache.thrift.protocol.TType.STRING, (short)6); private static final org.apache.thrift.protocol.TField SORT_COLUMNS_FIELD_DESC = new org.apache.thrift.protocol.TField("sortColumns", org.apache.thrift.protocol.TType.BOOL, (short)7); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? 
extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new TScanStandardSchemeFactory()); schemes.put(TupleScheme.class, new TScanTupleSchemeFactory()); } public ByteBuffer startRow; // optional ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift/generated/ColumnDescriptor.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * An HColumnDescriptor contains information about a column family * such as the number of versions, compression settings, etc. It is * used as input when creating a table or adding a column. 
*/ public class ColumnDescriptor implements org.apache.thrift.TBase<ColumnDescriptor, ColumnDescriptor._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ColumnDescriptor"); private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("name", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField MAX_VERSIONS_FIELD_DESC = new org.apache.thrift.protocol.TField("maxVersions", org.apache.thrift.protocol.TType.I32, (short)2); private static final org.apache.thrift.protocol.TField COMPRESSION_FIELD_DESC = new org.apache.thrift.protocol.TField("compression", org.apache.thrift.protocol.TType.STRING, (short)3); private static final org.apache.thrift.protocol.TField IN_MEMORY_FIELD_DESC = new org.apache.thrift.protocol.TField("inMemory", org.apache.thrift.protocol.TType.BOOL, (short)4); private static final org.apache.thrift.protocol.TField BLOOM_FILTER_TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("bloomFilterType", org.apache.thrift.protocol.TType.STRING, (short)5); private static final org.apache.thrift.protocol.TField BLOOM_FILTER_VECTOR_SIZE_FIELD_DESC = new org.apache.thrift.protocol.TField("bloomFilterVectorSize", org.apache.thrift.protocol.TType.I32, (short)6); private static final org.apache.thrift.protocol.TField BLOOM_FILTER_NB_HASHES_FIELD_DESC = new org.apache.thrift.protocol.TField("bloomFilterNbHashes", org.apache.thrift.protocol.TType.I32, (short)7); private static final org.apache.thrift.protocol.TField BLOCK_CACHE_ENABLED_FIELD_DESC = new org.apache.thrift.protocol.TField("blockCacheEnabled", org.apache.thrift.protocol.TType.BOOL, (short)8); private static final org.apache.thrift.protocol.TField TIME_TO_LIVE_FIELD_DESC = new org.apache.thrift.protocol.TField("timeToLive", org.apache.thrift.protocol.TType.I32, (short)9); private static final 
Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new ColumnDescriptorStandardSchemeFactory()); ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift/generated/TCell.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * TCell - Used to transport a cell value (byte[]) and the timestamp it was * stored with together as a result for get and getRow methods. This promotes * the timestamp of a cell to a first-class value, making it easy to take * note of temporal data. Cell is used all the way from HStore up to HTable. 
*/ public class TCell implements org.apache.thrift.TBase<TCell, TCell._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCell"); private static final org.apache.thrift.protocol.TField VALUE_FIELD_DESC = new org.apache.thrift.protocol.TField("value", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField TIMESTAMP_FIELD_DESC = new org.apache.thrift.protocol.TField("timestamp", org.apache.thrift.protocol.TType.I64, (short)2); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new TCellStandardSchemeFactory()); schemes.put(TupleScheme.class, new TCellTupleSchemeFactory()); } public ByteBuffer value; // required public long timestamp; // required ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift/generated/AlreadyExists.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * An AlreadyExists exceptions signals that a table with the 
specified * name already exists */ public class AlreadyExists extends Exception implements org.apache.thrift.TBase<AlreadyExists, AlreadyExists._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AlreadyExists"); private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC = new org.apache.thrift.protocol.TField("message", org.apache.thrift.protocol.TType.STRING, (short)1); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new AlreadyExistsStandardSchemeFactory()); schemes.put(TupleScheme.class, new AlreadyExistsTupleSchemeFactory()); } public String message; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { MESSAGE((short)1, "message"); ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift/generated/TRegionInfo.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; /** * A TRegionInfo contains information about an HTable region. */ public class TRegionInfo implements org.apache.thrift.TBase<TRegionInfo, TRegionInfo._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TRegionInfo"); private static final org.apache.thrift.protocol.TField START_KEY_FIELD_DESC = new org.apache.thrift.protocol.TField("startKey", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField END_KEY_FIELD_DESC = new org.apache.thrift.protocol.TField("endKey", org.apache.thrift.protocol.TType.STRING, (short)2); private static final org.apache.thrift.protocol.TField ID_FIELD_DESC = new org.apache.thrift.protocol.TField("id", org.apache.thrift.protocol.TType.I64, (short)3); private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("name", org.apache.thrift.protocol.TType.STRING, (short)4); private static final org.apache.thrift.protocol.TField VERSION_FIELD_DESC = new org.apache.thrift.protocol.TField("version", org.apache.thrift.protocol.TType.BYTE, (short)5); private static final org.apache.thrift.protocol.TField SERVER_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("serverName", org.apache.thrift.protocol.TType.STRING, (short)6); private static final org.apache.thrift.protocol.TField PORT_FIELD_DESC = new org.apache.thrift.protocol.TField("port", org.apache.thrift.protocol.TType.I32, (short)7); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? 
extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new TRegionInfoStandardSchemeFactory()); schemes.put(TupleScheme.class, new TRegionInfoTupleSchemeFactory()); } public ByteBuffer startKey; // required ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift/generated/IllegalArgument.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * An IllegalArgument exception indicates an illegal or invalid * argument was passed into a procedure. */ public class IllegalArgument extends Exception implements org.apache.thrift.TBase<IllegalArgument, IllegalArgument._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("IllegalArgument"); private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC = new org.apache.thrift.protocol.TField("message", org.apache.thrift.protocol.TType.STRING, (short)1); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? 
extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new IllegalArgumentStandardSchemeFactory()); schemes.put(TupleScheme.class, new IllegalArgumentTupleSchemeFactory()); } public String message; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { MESSAGE((short)1, "message"); ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift/generated/TRowResult.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Holds row name and then a map of columns to cells. 
*/ public class TRowResult implements org.apache.thrift.TBase<TRowResult, TRowResult._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TRowResult"); private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField COLUMNS_FIELD_DESC = new org.apache.thrift.protocol.TField("columns", org.apache.thrift.protocol.TType.MAP, (short)2); private static final org.apache.thrift.protocol.TField SORTED_COLUMNS_FIELD_DESC = new org.apache.thrift.protocol.TField("sortedColumns", org.apache.thrift.protocol.TType.LIST, (short)3); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new TRowResultStandardSchemeFactory()); schemes.put(TupleScheme.class, new TRowResultTupleSchemeFactory()); } public ByteBuffer row; // required public Map<ByteBuffer,TCell> columns; // optional public List<TColumn> sortedColumns; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. 
*/ ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift/generated/TIncrement.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * For increments that are not incrementColumnValue * equivalents. 
*/ public class TIncrement implements org.apache.thrift.TBase<TIncrement, TIncrement._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TIncrement"); private static final org.apache.thrift.protocol.TField TABLE_FIELD_DESC = new org.apache.thrift.protocol.TField("table", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)2); private static final org.apache.thrift.protocol.TField COLUMN_FIELD_DESC = new org.apache.thrift.protocol.TField("column", org.apache.thrift.protocol.TType.STRING, (short)3); private static final org.apache.thrift.protocol.TField AMMOUNT_FIELD_DESC = new org.apache.thrift.protocol.TField("ammount", org.apache.thrift.protocol.TType.I64, (short)4); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? 
extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new TIncrementStandardSchemeFactory()); schemes.put(TupleScheme.class, new TIncrementTupleSchemeFactory()); } public ByteBuffer table; // required public ByteBuffer row; // required public ByteBuffer column; // required ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift/generated/BatchMutation.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A BatchMutation object is used to apply a number of Mutations to a single row. 
*/ public class BatchMutation implements org.apache.thrift.TBase<BatchMutation, BatchMutation._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("BatchMutation"); private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField MUTATIONS_FIELD_DESC = new org.apache.thrift.protocol.TField("mutations", org.apache.thrift.protocol.TType.LIST, (short)2); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new BatchMutationStandardSchemeFactory()); schemes.put(TupleScheme.class, new BatchMutationTupleSchemeFactory()); } public ByteBuffer row; // required public List<Mutation> mutations; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. 
*/ public enum _Fields implements org.apache.thrift.TFieldIdEnum { ROW((short)1, "row"), ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift/generated/IOError.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * An IOError exception signals that an error occurred communicating * to the Hbase master or an Hbase region server. Also used to return * more general Hbase error conditions. */ public class IOError extends Exception implements org.apache.thrift.TBase<IOError, IOError._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("IOError"); private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC = new org.apache.thrift.protocol.TField("message", org.apache.thrift.protocol.TType.STRING, (short)1); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? 
extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new IOErrorStandardSchemeFactory()); schemes.put(TupleScheme.class, new IOErrorTupleSchemeFactory()); } public String message; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { MESSAGE((short)1, "message"); ======================================================================= ==src/main/java/org/apache/hadoop/hbase/thrift/generated/Mutation.java ======================================================================= /** * Autogenerated by Thrift Compiler (0.8.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hbase.thrift.generated; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.Set; import java.util.HashSet; import java.util.EnumSet; import java.util.Collections; import java.util.BitSet; import java.nio.ByteBuffer; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A Mutation object is used to either update or delete a column-value. 
*/ public class Mutation implements org.apache.thrift.TBase<Mutation, Mutation._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Mutation"); private static final org.apache.thrift.protocol.TField IS_DELETE_FIELD_DESC = new org.apache.thrift.protocol.TField("isDelete", org.apache.thrift.protocol.TType.BOOL, (short)1); private static final org.apache.thrift.protocol.TField COLUMN_FIELD_DESC = new org.apache.thrift.protocol.TField("column", org.apache.thrift.protocol.TType.STRING, (short)2); private static final org.apache.thrift.protocol.TField VALUE_FIELD_DESC = new org.apache.thrift.protocol.TField("value", org.apache.thrift.protocol.TType.STRING, (short)3); private static final org.apache.thrift.protocol.TField WRITE_TO_WAL_FIELD_DESC = new org.apache.thrift.protocol.TField("writeToWAL", org.apache.thrift.protocol.TType.BOOL, (short)4); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new MutationStandardSchemeFactory()); schemes.put(TupleScheme.class, new MutationTupleSchemeFactory()); } public boolean isDelete; // required public ByteBuffer column; // required public ByteBuffer value; // required public boolean writeToWAL; // required ======================================================================= ==src/site/resources/images/hbase_logo.svg ======================================================================= <?xml version="1.0" encoding="UTF-8" standalone="no"?> <!-- Generator: Adobe Illustrator 15.1.0, SVG Export Plug-In . 
SVG Version: 6.00 Build 0) --> <svg xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:cc="http://creativecommons.org/ns#" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" version="1.1" id="Layer_1" x="0px" y="0px" width="792px" height="612px" viewBox="0 0 792 612" enable-background="new 0 0 792 612" xml:space="preserve" inkscape:version="0.48.4 r9939" sodipodi:docname="hbase_banner_logo.png" inkscape:export-filename="hbase_logo_filledin.png" inkscape:export-xdpi="90" inkscape:export-ydpi="90"><metadata id="metadata3285"><rdf:RDF><cc:Work rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage" /><dc:title></dc:title></cc:Work></rdf:RDF></metadata><defs id="defs3283" /><sodipodi:namedview pagecolor="#ffffff" bordercolor="#666666" borderopacity="1" objecttolerance="10" gridtolerance="10" guidetolerance="10" inkscape:pageopacity="0" inkscape:pageshadow="2" inkscape:window-width="1131" inkscape:window-height="715" id="namedview3281" showgrid="false" inkscape:zoom="4.3628026" inkscape:cx="328.98554" inkscape:cy="299.51695" inkscape:window-x="752" inkscape:window-y="456" inkscape:window-maximized="0" inkscape:current-layer="Layer_1" /> <path d="m 233.586,371.672 -9.895,0 0,-51.583 9.895,0 0,51.583 z m -9.77344,-51.59213 -0.12156,-31.94487 9.895,0 -0.0405,31.98539 z m -0.12156,51.59213 -9.896,0 0,-32.117 -63.584,0 0,32.117 -19.466,0 0,-83.537 19.466,0 0,31.954 55.128,0 8.457,0 9.896,0 0,51.583 z m 0,-83.537 -9.896,0 0,31.98539 10.01756,-0.0405 z" ======================================================================= ==src/site/resources/images/big_h_logo.svg ======================================================================= <?xml version="1.0" encoding="UTF-8" standalone="no"?> <!-- Generator: Adobe 
Illustrator 15.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) --> <svg xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:cc="http://creativecommons.org/ns#" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" version="1.1" id="Layer_1" x="0px" y="0px" width="792px" height="612px" viewBox="0 0 792 612" enable-background="new 0 0 792 612" xml:space="preserve" inkscape:version="0.48.4 r9939" sodipodi:docname="big_h_same_font_hbase3_logo.png" inkscape:export-filename="big_h_bitmap.png" inkscape:export-xdpi="90" inkscape:export-ydpi="90"><metadata id="metadata3693"><rdf:RDF><cc:Work rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage" /><dc:title></dc:title></cc:Work></rdf:RDF></metadata><defs id="defs3691" /><sodipodi:namedview pagecolor="#000000" bordercolor="#666666" borderopacity="1" objecttolerance="10" gridtolerance="10" guidetolerance="10" inkscape:pageopacity="0" inkscape:pageshadow="2" inkscape:window-width="1440" inkscape:window-height="856" id="namedview3689" showgrid="false" inkscape:zoom="2.1814013" inkscape:cx="415.39305" inkscape:cy="415.72702" inkscape:window-x="1164" inkscape:window-y="22" inkscape:window-maximized="0" inkscape:current-layer="Layer_1" /> ======================================================================= ==src/site/resources/css/freebsd_docbook.css ======================================================================= /* * Copyright (c) 2001, 2003, 2010 The FreeBSD Documentation Project * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. 
Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. 
* * $FreeBSD: doc/share/misc/docbook.css,v 1.15 2010/03/20 04:15:01 hrs Exp $ */ BODY ADDRESS { line-height: 1.3; margin: .6em 0; } BODY BLOCKQUOTE { margin-top: .75em; line-height: 1.5; margin-bottom: .75em; } HTML BODY { margin: 1em 8% 1em 10%; line-height: 1.2; } .LEGALNOTICE { font-size: small; font-variant: small-caps; } BODY DIV { ======================================================================= ==.arcconfig ======================================================================= { "project_id" : "hbase", "conduit_uri" : "https://reviews.facebook.net/", "copyright_holder" : "Apache Software Foundation", "phutil_libraries" : { "arclib" : ".arc_jira_lib" }, "arcanist_configuration" : "ArcJIRAConfiguration", "jira_project" : "HBASE", "jira_api_url" : "https://issues.apache.org/jira/si/" } ======================================================================= ==.git/FETCH_HEAD ======================================================================= 35408b5d2a1ec8f50471d6899e366801b1cd2992 branch '0.94' of https://git-wip-us.apache.org/repos/asf/hbase 225a9ff80814ba900567160bd66c2790ac395ea6 not-for-merge branch '0.1' of https://git-wip-us.apache.org/repos/asf/hbase 76bd7a435717e314cfcfbe6a5125cc3bc9f6600c not-for-merge branch '0.18' of https://git-wip-us.apache.org/repos/asf/hbase cba75045e43e0e7b5ea45814430833c6b05e2eae not-for-merge branch '0.19' of https://git-wip-us.apache.org/repos/asf/hbase 32a0de7c563609280cda0cff1726800385a58498 not-for-merge branch '0.19_on_hadoop_0.18' of https://git-wip-us.apache.org/repos/asf/hbase b64b592f237e873bcb96b87c34c8537ee9af5ed4 not-for-merge branch '0.2' of https://git-wip-us.apache.org/repos/asf/hbase 3196c23cdc5c5a6b7089c85ffa500a9f754c0f49 not-for-merge branch '0.20' of https://git-wip-us.apache.org/repos/asf/hbase 61cd9b450ca8fef698e6032ba38f3dcc6d54d645 not-for-merge branch '0.20_on_hadoop-0.18.3' of https://git-wip-us.apache.org/repos/asf/hbase fc685e7ab32d437dd1f0f3dcab829493888d159f not-for-merge branch 
'0.20_on_hadoop-0.21' of https://git-wip-us.apache.org/repos/asf/hbase 2f8f236014c202250db178ad4a64f25dd79f371b not-for-merge branch '0.89' of https://git-wip-us.apache.org/repos/asf/hbase 9d18d00259702b0a622127479cc2d1e56ad0a6c1 not-for-merge branch '0.89-fb' of https://git-wip-us.apache.org/repos/asf/hbase 31dcca3d1eea10cca90460911134bbb20e41dc2e not-for-merge branch '0.89-fb-accidentally-wiped-commit-log' of https://git-wip-us.apache.org/repos/asf/hbase dffc4672fa8bb4fa98acabf8ad29553310e77b2b not-for-merge branch '0.89.0621' of https://git-wip-us.apache.org/repos/asf/hbase 1bc79b40c23749ffd696624661699b19e677bb95 not-for-merge branch '0.89.20100621' of https://git-wip-us.apache.org/repos/asf/hbase 526f9e14fc92b3a4c76b1d58ebef1c91f050ae75 not-for-merge branch '0.89.20100726' of https://git-wip-us.apache.org/repos/asf/hbase d43f89f72b6e90075527314d04fd444b233043fa not-for-merge branch '0.89.20100830' of https://git-wip-us.apache.org/repos/asf/hbase 1c2775af4490b89b24a8cad7ce41798da95aa387 not-for-merge branch '0.89.20100924' of https://git-wip-us.apache.org/repos/asf/hbase d39548939457d84b033eddbcf9e641c00a6d2e35 not-for-merge branch '0.90' of https://git-wip-us.apache.org/repos/asf/hbase a6208ec1bcb9ec5002b5fab0262e8107d7b7389a not-for-merge branch '0.90_coprocessors' of https://git-wip-us.apache.org/repos/asf/hbase 1cf0c82d71dd13c41ddb2a3cb5d82058798405fc not-for-merge branch '0.90_master_rewrite' of https://git-wip-us.apache.org/repos/asf/hbase 4037c8de59598b3241bcf2060aea6f253176d782 not-for-merge branch '0.92' of https://git-wip-us.apache.org/repos/asf/hbase 2cf68bdc1fbaf20be72819dd5b9ef204f30457c4 not-for-merge branch '0.92.0rc4' of https://git-wip-us.apache.org/repos/asf/hbase 0005cd2c2420f9d6e1d27dec3841b6d76d06c07a not-for-merge branch '0.94-test' of https://git-wip-us.apache.org/repos/asf/hbase 4793668c1d70b7a784e12a782b79d15099117ea0 not-for-merge branch '0.95' of https://git-wip-us.apache.org/repos/asf/hbase 7d56e0523fd9eacf074b602a56bf48f98acea0c8 
not-for-merge branch '0.96' of https://git-wip-us.apache.org/repos/asf/hbase 503709fdf67d335fcb96f9a60a4b971de01479e1 not-for-merge branch '0.98' of https://git-wip-us.apache.org/repos/asf/hbase b2ee50a3cf81c0774cf8d9ee3a3760a0e961c3d1 not-for-merge branch 'former_0.20' of https://git-wip-us.apache.org/repos/asf/hbase 0abda799aa395ac0352de9a4720a6662e16c27d8 not-for-merge branch 'hbase-10070' of https://git-wip-us.apache.org/repos/asf/hbase 9b2f9d5316776f4c510ceb304703be3d801722e5 not-for-merge branch 'hbase-7290' of https://git-wip-us.apache.org/repos/asf/hbase ca65f47aee238e6071dab5c9d4fd2323550ea82e not-for-merge branch 'hbase-7290v2' of https://git-wip-us.apache.org/repos/asf/hbase be92c350d9c117c94dc3bf41ab8cf1208e1796c8 not-for-merge branch 'instant_schema_alter' of https://git-wip-us.apache.org/repos/asf/hbase de1f96096a0f34ab8910d5b52da60cb5cf5eecdf not-for-merge branch 'master' of https://git-wip-us.apache.org/repos/asf/hbase e9295fdf9b9bae1ca36e0bd495fa92cb43f04717 not-for-merge branch 'testing_remove' of https://git-wip-us.apache.org/repos/asf/hbase 6826da42644036d4e32db9347e64e5fea87f1fc6 not-for-merge branch 'trunk_on_hadoop-0.19.1-dev_with_hadoop-4379' of https://git-wip-us.apache.org/repos/asf/hbase ======================================================================= ==.git/description ======================================================================= Unnamed repository; edit this file 'description' to name the repository. ======================================================================= ==.git/info/exclude ======================================================================= # git ls-files --others --exclude-from=.git/info/exclude # Lines that start with '#' are comments. 
# For a project mostly in C, the following would be a good set of # exclude patterns (uncomment them if you want to use them): # *.[oa] # *~ ======================================================================= ==.git/gitk.cache ======================================================================= 1 614 20ce4e5f117411f3eb2a444ace12754e6072eaf2 9264a40cf894918a7a9c3b3d5a907f8feb7285a0 {47fc8dd3fd6ec1cc8dd836d8abb0a8b906485922 9264a40cf894918a7a9c3b3d5a907f8feb7285a0} 20ce4e5f117411f3eb2a444ace12754e6072eaf2 b2a2e060982634cfae4bec2665f2b63e1f680ff1 b2a2e060982634cfae4bec2665f2b63e1f680ff1 7d56e0523fd9eacf074b602a56bf48f98acea0c8 f23c4c093a15c3bc03f3e2bde176f6b3c66a6291 {6ea0b7f8c2af72c6fd379f04e2d7145928e237ad bd59227293286b0ea13ae7f91a680a375c347abf 19c61d55cf6f2b95f81ceeb639e042fc950290e4 ced723b6d9ad936e5d6ca2afd861348c9291908d b64e9c2c84629a8b9be39e67d8b5c7677334d156 6f11913aab78357c1b983a7120c8bb82e5497e8b 01a6e62848bce746b78d07368c35f1e1ad207e8b 7180d6c935467a2ff64b117a6fe25a0695dcfad3 229e23ea4c0c244d6e1341724962243b3fe2335b 29d0cafad0a28a7c333e57ac5475ac322be6dfa1 519b81a439f6d243920758ed6a3c15df32ae27ba e51012ca94d697ef1b93924b269088d69dff1404 b868c73e6c8dd517e371a14b1d6c29dd258cb9f0 5f40d0244f0d6e55c1e978141a9cbd8a675ba47a 3d68b4f169b4f6d73df3ed2048485898db9b11ad b507ef27b6fd198fc23f2a44bd87e31a53011cec b71a7ad6800abf449126343ebc518d3ade8521e3 3c58b69103384a8cecb33adde8b0369e12473098 a1163ef49caae260b35b18da0e7adefce273849b 2e5e9d107fd7b1e592038275606ab127982bf5b8 b1df216e234f05e79ab5751ea022b8ff465d420e be8c2b0671991d9ab2680d305d50b83319289e9b 36719c9c117ee495b4b06d0ab3b3221293824465 c7f01afacb3c61e7d4ad097fc68e6b00862c6167 8335840d859affffa954a742dbd865b1b55f220f da6db5c153579df2721309796ce50c89838cd551 bb9ea7d4f02e985fc8b36903366165fb67831996 106394a090416cd2020de8c539260f3eca12aed2 a0915457e4955d9c336ed550ec09c916b7994e94 548db040fa46c6657115570fa52f7090361bf34c 6a4909a1d70265741a8692c7e748b6cfc12b7afe 03f779fe0185565997d01a396957e82223be1ed1 
6bd49f30a5052fc395b7c4e2f0c26cd9d1290b83 1898174f45b21accdb5d0711cc9271c7d422a93d 5308a3208833fafa55603dae89a5ea85157b1691 34bb26b8e65aafbaed31fa67d349023e2a8873e1 190e406efdfff5bc17b5974632cad389f782209c 12ec4b14ecd17368864aef8eaa05765cbe5f5e2f 0fa3dc2b8a473d617c0aae2148f2122032ba1379 77711658ce222493d89f8de13149d816f78fb944 47ab1c66935bad271bff97e046719b5552e89b0a f1b53f58455318ed94b979db699d4469f622b67b 17d50e4484e64302712484806cb1d3d0166e1bbf d90ff1949fd4f5964952c21153a1cb2413558e58 13b491c76c3041ce12964c3f8c937120490ea42c 2898d2a16dc737a020e773d7080788eb380b890b f23c4c093a15c3bc03f3e2bde176f6b3c66a6291} c61cb7fb55124547a36a6ef56afaec43676039f8 264725d59274374d7b9c8ee2b47a86713ab1a6b8 {0b883059ead43e84d691d92066aca5c658101dea ea9e6576945cc36f5e13a67da285498979411cb2 edae564856b8aefbede64fd3eb73c30aba30372a 41691e469ae4f59aa04bfec99b1e8a19699f1d04 fd94fcde583c66e9f231342948c3e677a99a6d3b b168b8b2d5dd34e431b12eceac5ac0952144f8d8 9dcebc396bf7dc697fba990c6ee35bc5009a7dc9 ea0731d60f09859a3d35db011822e676bc9e2dda c5d5a5d1bc4ffab220fa9bf93aaa18fed7d6c8c8 33f842855a0829b3542baf99e98f396c17a6672f 26387cbf186635c6ccb3fdaf113861e2a05dce5c 3054c3b9b8d4eb873d49392c246bbf61188fcbe7 35e3b9c31a79a891d8988e982556b4d9b696f39b dd9ac0c0ad33448a46b5e61334bf86975fd9f779 46e53b089a81c2e1606b1616b1abf64277de50a9 98de4b04ff4e466a8a6f396de372636bde7f03e1 92b2c86776d968c9f44bbf848e56eff753c8950f 75d1431a28045acb93fd0c833fc507e71ff03383 264725d59274374d7b9c8ee2b47a86713ab1a6b8} 796134e9f5ebc97451d9e59bcc75fcb3d01b4a4d 060c3831efbc1181e71e118889887d07b90e3dc9 {494e0b50e9f57245ecfbb6be5a61e2630a119fa6 d89342e4166c3f969d79e6be57d7efd0c37cd5b2 c93ceed290ed5b79c72dafa4252be0d5ca88ff79 7caceb23c70eadb02f2fb312926319d373f8e19a 230a768a3ccf4ef8e50236e360035efadaea7ecc 3c90cfa0b2f9554f7227545cac6857ecab74374e 296714c7d2b6bbfa046e0f5171ca15a9af30d4fc 49d7b9dc456d4ac7dca1cd16e019bf390332eea4 f87fc3cea43698e9f417b12aabb7bd4f7932155a 9b9f4df87a432778cc6f16becfeeea72e507e526 
3257b645637469ce9f4e56d1f4b4514df46405fd 95444356ef271b90056d9fa09b5962c48f49557b 07caf4bf8e77b26469c8bc49eaa278c3a4047612 a545197935149e6234b232182259984cb05901af a8c24b81c7a228f1487c1828b6bb8037e153d19a 074937aecce2d9314660f404218c31e875d8a7ae 946367aed9fc870cb3b4746398c0f4037682d08f 128ec09a3c2be2a949cd8c127f5d17a88d21996a 955ddd27188af7dc850cf70b32ba7e2b56388c93 54fc1dd1b6df232faf7408766e77613ce472784a 96490f29fdc178234668ac7e290a73292ffa0213 daa7b14d52565fcf138cd2519852fb9168bec9ff 477d370ee939c161efbd7116b0975862fb2478a4 c5609e2688c6c5104eb3d42d79d2965cc0746f64 6a209bbbe0ceac7ec9f8ebc890fa2676fb73bd42 d57f1ae37e26c4c62e450c51b8a0e3476aafdb74 ad8837690a34e95a69ca4f4aa9c70f7687767c83 2d5f7444ffb3dc543f6c3d45de285426db59c003 dc401e991e4dd1af5a8752f8c6b3f4b7484c245b dbc84c970313ecfe45f0af7f34a1aeb7871ca3af 5a5d1d0b487f284a13bb354a9e9ea56205e32574 17f150a74196b5c45d0014967ee0bf76b3751920 fcc684655b8beeda7346ba8ad2a914e5e3639567 be567786ab315823fde765c198b0c51daf788270 695275d2659bf987111aff694f94e9bb0927ceca b48def24ca246df0847bb27c108f0dfe9e441b67 060c3831efbc1181e71e118889887d07b90e3dc9} 9264a40cf894918a7a9c3b3d5a907f8feb7285a0 a14502015ec444b4c133b6b410c9a2fd376d66f9 {ab9234e71d82bff1a13a921e91a9cfe42abda143 c9c01e6eb12451bf3eb4a0044fd3f496c9c1974f 71d197c15f17bc23c77d811459f7ea8dfb4a551a 7495b6a5ca110691e321f5175426e93c970ceb0d cf354484795454def7010c9e23f5ad9c66880f35 164c114f938a518f485a25f2c0cbf51b7a718c91 4416d0b85ec6808f328cc9f2283ed5ced291a174 d2d0ad01e051d2ec937addf2cec6303ac07c9d66 3c3594832456e0cfbd2911596facac623724d8a9 b9576d9bcc094f6b9fe41deea44d792a0cbbfb15 8ca23f089434cadcd665ef60946890926ad5894e 0d6d06559b2f440d8e6a5ef4ca2de34edf01c024 a14502015ec444b4c133b6b410c9a2fd376d66f9} 97b7df274c27ae1075d0782ad31cf284362eaf48 6905e533e3d383ad7cba1229944eb40429452547 {d02bb538d4fcab898061d7b234ff66222050a0d9 d8aab61c5575ae640c086eadb6b6210d597a44db 75bb338e039f38511aea42d7e44562ac6c901f6c 985dd166b96dde93f583a1e1c4d3a6bdd39de5ea 
700d405bdcd38affda9a50529c4f3ca796c0473a 8013e419644ae3e4dbe773ba87a7c1ea47b2200e 7d623450344503bf1b67b82a7fb1c69b5160a24e 73ddef615b4dcc867595f85df6937630d879a3dc 607e90db39bf2995ce006704d3b0395573cbf532 f44e73a32edb22699c0b5d37ca9ec125a50f65a5 5f58a25c28e309bc036777665f8a84bc4bb7045d 5119d155bdf46fb7a42602b64bddf1df20505a23 f523e9e44843258f8db2e9f4d4b0f8922c9370e3 b5046f4fcc505d01666b645687cbc040fc182613 b9b720c484b9e8419d29f91aad3928ef99707608 3df473dfed4ae8a26a861dd0f83376f0a0a41c19 5082e3712907edbbe52393e8e5afcb272e72b62b 85c985714d2b7a48022ea99ebb06050816e54497 a3c23513153e9f11846ae1afb8202420f190203c b780e8bca6722778ec36fa967461710dccf9c994 40cd8b9cae77d4dda1b79378d98a7146f24e0beb 17f04e0fc8a7144f693e999b0acfb213309bba9d 7894a9e5d99456fed1cd22ca0b1a3b61133124b6 d4abad22d1f3a7d7c112ec14b57cdaab25c5fa0c 6e8a88beea67d565306282eaeb14a24d128bcc21 3b75ea6650df76765dce597b96b7bce13a54c189 0fc6e8db658fe4cdaad370a4e5ac8a55c10e7ad2 d3c2e26ccb8369b8a3c965fc2d585e1327594b15 1944fdc2fca9a809d87de7fa000886f38e91810f 5166a2d1ee207f46bdd0dccd11afb9a885ee8512 cf17994dc5f0f05f26c270c2e10107043907b852 382d2701b88e7710195a5e2aff84a5f48779604a cf15cb57ceba7d25fd80ab4af4dcfbc619117495 df35b39b0b4e32a5fe81cec356047f948e74a0a1 74a47e2aedbc0e2972d0f1a6495db42114e57d96 29e759c0024e44400be17a3c19214f017b117dd9 164fd297e9cc5c648c9b1d6826b5f8e42a189ee6 7bb32e5846f7a45899d3641885f9107745cb2198 a3564fe2ad38d9c925706d2e27c5c671bb9c1978 757f3caf9999bbac7fafcb5695b611e9702a097f dc89a496549f29181ba8f122be6db69b11525c24 b948ab5a361c2901cb0e7a117af1b3dd3da15d08 f2d0c0919ccfc84cd024bd86bab1a95405dd14fd ec6e7ca68356f4d17abeaccc5a7500628b0f69be b0c5562ca8430545afb7c5009df7ba3dd6e26ad1 9bcc833588d6bbd0b5356f73b4aaf8787bd3fd68 35375d4b29de757ffc0e4f9325657333327b2788 f92d08ea4a140891c09ebc2b602fe8d7c6722269 4f6588e0b1da45556a947926a246cc409860b0b8 b7f99afbcd87b3370ad83855c5952b5596541ab0 d139ab486c0ac74f5af0c1779a4f7ed3d6bb6590 cbaf94475d547bdd4234b9fc08042ce3debbbcc7 
6011f3e673c3485c483789970529b7d4474f1683 7c19a012271e5e57f2b7cd47825886939897dfac 35c49811de6f59aacae21dc9e8be5a6908d6926c 3ce7d25316d638c7a6f01e5336302b727307a322 55f6a4163508cef951b4cb0182ec7b8f038ea59a fb12fd6f936cbea1dfb1e67f4db1bb8724be4d0d dd913ab3adab5884d62743f3cfc3f3472fd053cc e22c7efeac02efde3451a0c9ff9bdcd2725576d0 6905e533e3d383ad7cba1229944eb40429452547} 9d18d00259702b0a622127479cc2d1e56ad0a6c1 841b24b37c067cf78824cb463b10c8d11665bb4c {84ee3d54d41f928cfd253ccb4d5d49aa5d4ede2f 8e60d40e37676da90a269cd610d6a11d86d29c6b de727cc1d7e030023efc173a6034a70003ef524e 3a61ebfb109c5203f547e2f5772a404bec86ad1b 307995950fe46710d4c02bd214abe4a41098debd a7fe50b445dc0ab4595c165d6a74eb9f95efd8a8 fbb21f5dc102a46d50e3e78dc05451acb6f119ba 2e9ae11430309e6dcbe9dd9699cab817220903ab 0744c27fc8a3c19001f82aaaf55ac211271f8b80 674e2cae2f53c7d0d01a4be58c982112feb4a000 f955c91485e6b147b255c1dd6f20082625baa2ec 94184eb5f8a8e8c284f7f425a22c0710f7b7ed87 b203c37572bd4d079e89b94846113a3df7c310a4 b42b38f064d0c83dde2a580a82bb027fccec4396 f648f6060b7aeef730bb6786539e3ab3a7b4d99e 795a7c096bbe63af8fb88cb3657a587b11f34bc7 555a80585bb19cb615093adb3e215cc3c5fac3b4 90c7d0d614cd3fa5f7edc7285c6488e9613ebd0a e475dca13d9ff6bac41461237bc956a99133c4b1 138006b53081fbb73cfec36fc9b89c66a024e752 fdf2d11e863b4b542e29e7eb6a629e6d9ceb90b6 efd1f031b9e8ba0ebdba38a523bd955ceb109187 a14db860f880d3569b48204fc37ae6765415fdc1 51d69bd15351421232125eeaa2c2cf27c2e9d1e6 88142f4da0aad21abd90aef0dfd93e8eb03b64ed 256b63a04891ba4138ea7cafc0624a1f094b4fe1 a61ccc27d072bb71737fa742ac11caa96764caab cc1474f4a7fb65c4445e8ebf47b7c9f6cfe09766 db972590c65ce91d1f1a7d1761dcfd1e71d549f6 86d19351cb4e3b8a77f38a58294dcd3f4dec1971 e26f41920f713955560397bbeaf5203c774139ac d7a7ca3398ce1408e22539c824a050ffcb5cc25f f74ac813e73b51868192926eb98278edeb5d27d6 493580dc21dd493a27f2dae78d875d9cf7992554 52591973cb499da721b65a7cf363e8ccae8b4582 e216445124d1de6026d906cf155f7de3d1b3ee65 b078c4d32127e9a439602c9c2be3ee5a607ea1e9 
dcfd36929328a730d619c34ca2826d939e329b15 1b2afd1c1f72726eaaa0fa4a15687e14fc843d20 3d4aaedca2119f9f6982d77814b1452382510d4d ffd9645ab55b7839a0c0c86b22b3994dc5288033 ccc03f50f50011d766b9b9c3fa440fc7441df240 ff5a191a7bf751537b2ed1a450811271063b12ac 20916e1846421c794b8503e1eef085bb5f994567 18e1a0184e790c2bc52f4d60f602e73fd920d1ba c466c3792f2f6e00b5ca7997e1896ea0d21053eb 562f56a52a952ef2ae563d70df9c836fd22b4d54 2ab0d1c8f2213cf33ce55d626a619ad1e0bad0d6 5b28f8c0920f394d89d9f32840c37923b8c7dc27 aa4a46f5b58219413b1d85ce5144a24d12c98b81 38ed953207f7108ee225b087c54b6903c1b94f3d 65ffa7e2f8744097b4e674b2ac613121479612ff d878b16081b68222221127d3f1a799e3d53f6c13 b7e89a05f20bf885cc09e227c62cb0639e532773 17e45a70d3e46a245f006d7434089137cddb24df 3b95287866ccc74e456b36ba56b5c5bbd582d044 b39576eb0a26e7607d5e24b6f50c934e091a16e4 59d9857bda226f5e1c98afb71e42789439a1c69c f3beddf8428929c0c47bedb1f710477d6e44c800 0296547aa2faf4457a58b26f5e533af326775374 572779d4bdf6010c136f93c3fa14ab11bb597ad6 934a58f3433c9570129cf802797b7b301acd15c3 29a28aa05cded72cdfefeaaf9b1c610246ac5b28 e6b3951c4d82ceed410d090ffe4667ede872b55c 4ebfef49492835e12bd8d31b25b60323b57e2aef be1c1d83e4e8842ce5f5aba1fd3c669f47f505db cecc2df266939641aaf4812b4bdba62cfa4d1b13 bbfbee0c765fa8dff21e59a382b518f94199a6ae 7d38a444b30fad7ae251f0163999c769d4a7b31d 913652cdbab7ff33cb829e36960879ea731e0bb9 b3144d37501a3d44f1ebab0c3ff6725b83cf11f2 67f166693112b51613eec9ffdaf85560aca0d23d f95b806815328e4284ddbc243e5af5cae434af31 1d351abcebe9ffaa5229354ea474c127abb262a6 ecd21a4addb08ca86218513af6feea6e7d663284 1118e1b53d1251182688e3b6b5b2ff8e90ebd77d 30e4d58574db611b0510d5abca2ae85d0d4f8c57 b9a76a9ba37b6bd0ded41618f321e3ec60b66d37 1ae68a2bbd1924ea4ba7f67199dd5fb86fe0171e b699fcfe6ff4e590e986ea111a614a022ec5a681 e7f78e6a60d6eb3fc31c6e266ffd8b615a5b59a7 4871ccdf5966376124398c507579420ef0e0100a d928cfe84330f90b256a1398c73e386ef1da21f7 5430c2bea089dc016c1c069baf2934e55a84648c 4474252ec8062b3b2d856f256feac49a8fd08a68 
a8da435a58909bebb1fb71f48093027047a3e8db fcedae2b4467d9159996e9a902e01c52b49aac09 4079a6aa4d1a149f704581e2b8aa609e8afdc7df a0168ac60ab9dbfbc51b133e624687e0be94cbc9 0719715cb03586b271b5a0e99b1ebc2d93d6be18 4db657f7ac5c7d53b089547946bfeedf556bf0d1 177fdaadfe93d1504b2a50ef1aab45eb7bb5a117 ab814c612a977148000d0948c60c7404ec628c1a 68a689ee7342853f89bc6592dbf1bc14fbce6ccc d9d1cd09c737434c5486c23544cc289036de2f2d 64302205865a4ef42779e855b964089bcc59aca7 3f2443ba83fb61552e1dcef860b053be3e27a47f aa3874b2114043e32760338782f902f21c6025c7 ef139b6bb92b2624f63738076a03594620854881 49229ad128bd790d06eb30168e6f8ddb918f2995 495e30c18e7120613d30d2b8054a5da48a34fc44 920e6f1ea53aee18369c791891887558fa24cf18 18ac1e4db055f47c26576cfeb599ed8dce9fbb6b 8e3abdb576c361d18ebbbf9a61df79d2beb26e2b 79281ee0ba6b2d8030a72d7d7720521009f356d7 c8ea64307493aaf5d4d88192e6691cfd080849d2 1aa35bf4238b65f51a57a1ec78a2af26337907c7 c689a6244eccbb69b6e07b14c445b2e80cff7c68 71ae16b610bbb3ed4cf5cd1aca042eab2506b56d 72ff7bcdb003696a1739586e46a9df9c69ba9016 5fe2e5948ec70470afe899127b6a6fb56eb187a4 986014aaff44f748f3f09b0547b1b470e25ebb5d 6a1b34796492f721389f03e368463fe658d59136 f963095b5bb7eb1ba17633583e1f6082ccafea31 0156c42127a365b9ec659a551ca40c35a10b519a cb3a8b04ca4edead81acf2724e493725982762bb d7582304d94468c03696a20e6c986bebcc5c3b6e 56c71eb1cc07a02e5d9ebef75de28954118c8907 7c09c7963371854b62d0ef3e6bca09f74d58be4f 4550787436e79f58bf1f4f84a40b96d7248e70ea da7f9e4f78e977266aa63abd188daa4aa4d0e18f bae26c21d626eab2b35be4c7393de5f556904007 1b586c0b0ed03f5a99795d4a58e241859e2cb58f 504dad2cd9437ab0bfe32663a3a55c348938139d f4d0ae4ead4d71274edee3b9b98aa770e2515da3 1c463c09924a945636e345818e30a193f2484816 fe89e7bab5c41c7eb2bdab0a4f017cd34a9396e2 e203cbff5302e63d343f4e2de45a13d23b22bb06 9191bd00f4026b61894fcfd3d72599c53d14268d 54175f554a4f9b1ba715718e08f32149274b2a18 4820b519eebb4888ed72d7b1a64319ac6a219b92 c5025b2998bfe28b863fca9df6b920d62b4d1e7b b5e8a1af3ab820d0f964f47e921854f0bdae149b 
4ae8733da43f9da1efdf0bea3093b79fbbba7f3b 16274495ee06a80f7e2e7a7b65feee6e70867e80 259ff46312e12a4f72db0f5b6a64d76c4bff26e3 8643b585ecc847bdc6fc549f81c98e7f6c7aafb9 97381473e9fac7a2a4be0512dc1480fe07a2337a 979604674819d8fb67902075daddd3aad7661844 2f2986b3356de6fe78b1f9d451ced5860aaa6b9a 761f085e15d5d57726972834e19c37fa131c4b2c f1c0a24ff66ac4a66de181b732ce08d1160ae315 d76a51dd439517cd1b255fdddad99fa3f57f58ee fb82eed654f96898369d7377439fd9214ebbf2b1 5cb326f58853c4c9cefe98eeed4fb52818d1c042 fceefb94b08c421c7b4058e002f489b5788d0659 1426ba05ac327b8b12294298e97ee5e30f9a3a5d 306d50c1f2a1396b83f397f8c43e4741640ad27f 2d6ffb017b73a4bb525c55caace00004f89b27a9 ca9af80307dc548a5b93c06ce8ab25032f6d0503 e8b80ba94c6a251b301c61a0c3fd1b183aec4b4f 54e178701070b723c95d4d53ed1c06d008a03f2d 7619851abdfcc903487fb6d41cd6256c9b706e83 501f2c550a7e3181582f71d2917ef0dd39782dc6 0bf1416452d41ec75294ac154273aeffb3c9d4dd 62fd1fa2303b995351508666a933b5a61717d706 c61984832015929ac85379a142c71bf9eb22d8e8 e04688b7d0a00cce7b969b3096d48f152b542a17 a5604300ea3725f8244354ee08e4fc81168e6e88 2e28ec44c8e0c6dc99e3ba039e3cb1fcf2def1e6 032ea4b380a6e323faaa9d13e8c0f51c46d66bf6 3921ae00d28fbbe10715fc7223c851bc1b15ec5b 29b338873172a9b92dd393c2d3fb738614b05b42 d71c0330d157f63f080719af93d10f9ff9bb3031 de89c9470c5af411baabd15f08698b16c6621fe4 93d11d6efb3712e7c31186bff8b4dfac932f8df1 9bffeb9c2b44652b8cd3673152cb35d6f73a58af d2c640d60bcbe5ac61a488c49cd1b76e05081496 5f9420aba0d0d2bc1ebc95de2e903efb4297811d 8ff98a50f70f9c488c02e8d1fe0105ec668bec69 ce306f80fc82c7077bfffc911c7468328b1705cf ecba5e8293dbf18a1e61d0b313ec7ed278e78b22 39e22083b962c3ad6b9f0e65747bc61592356589 1087fda111c08dc6b1b8a3192946db602b533086 3aa7a38527fafe6c4b14b4383cf572f8f37aef82 d425c2ca7635a937b2374b6bb7745b380d1c5fb6 72cabe8b2ba9a1a509bdc797b0ca30d2c14e588f 517360f689bca433d53318f92bb2339db8b389b8 abaff6b49028b67a5dde2e7fe75bf85e9383c246 83776a477a540eec7e04e7f78d0e9908a2a04fec d6edf7147bbfbea81022c8ad0f359f1e519a7880 
24b6c150f5c077a05e0be8549e4d9f62fdfb6b1e bf780c40e85f3276b4b44a450914d6c3b5ee9619 75aaf88b1b026dec12dd6622f1cd71393c2df0e6 f6533774896b979d1175aa215804efdc4036c7b4 5912a205f90d66d35dd927c2e88d20d4e304bf63 dc31f172f9766f86f4cc0ab1f211e439b08665a4 555e6eafb15c3d84b0e50b9e0f5cd557e8e77bee c17637c7075b941f8ab65960bb13df7e6f2e1f84 449ee54a13f2c4d0b3b10bbea837b98f12e23ba7 6d73f87708b2500a2df78d9248106338c7266558 5ffc37167dbf371f8415b695eabb47048975cff8 48e8a694dd59151ea0e7913922df6a288928a518 e716f0f835873d188f3673c09269601aadf53af4 57b2f88df5fb76b277e10a216295177dc426b0eb 4a5953a7491afc25ac37c788b2c2bcb5aab3366a f5d394dc5f462e1a363758ad46114135e8e70ad3 a8670eb5b716eed5938499d8e2df464c70ee60d0 7bf052ba9d51ebc7b96af32821ed6ae6dc69d1be c2d91931755d3b1c49c0beb8056bcb9076a0cf9a 8e9784db29296e887fe575fd50f7659618a624f6 3bd959d4833345cefa71bbd303ee8dedccd8d618 68a38080d3d793200f68780589b47055e4910389 7e948e3af473748a577a424d0f34f832a695d7d6 331db5a0dfad42adb2ffecac01ee1a66749a018c e1875d357838cc0b1846e4178329ce111ceecc15 cffb33ff17179bbcceb0b84ec0d8fb7b89b654cd 106f16eb0ddae4811979c0c3c3dd9f9bcc85ae47 f0b565a265309c6a06dd05d773c2d8e1e1b7e14b 01afe19a7ac0b927b43621034085ccd4416c0d63 fde0eea0efc65d6732c20ee31514fce99416760d 2824f8a7983e3604621e9e60ef36c5469d6c6eac fc91dac3026891daf12c2382b5cb16b0e7632a87 65a90be94c7d5dd36610c68fdc7958ed5951ff01 2097aaad71ea5944202a4ba5c1ff8bd6ff84e090 9799fc091d4449d885b68f17a297744810939452 d04ad38e6861145d45a9062e521f392d483e7c7b b050ca2570a65c48e3b39e23641e6c934ed37ca2 c091e3d67a09a0ae9c9591bd920bd5a312a2cd41 73155281cac2830cee1465833fc4719ae9c5b2e4 5f2bf095631cea30873bcbde90848201cd4ba875 0691f3b2962d9e9d20a7c7c8483599c0e624976d 1501e0994fe7ae580fbbe3c2bae6d59bdb12c8bd 356e9f89dc5e476a2ef29ac3e4933e54aa26e793 d2d9cbeb9dc440c907f3fb281a8ccb7b4dc1924e eb48f4dc8ec96482a370de40d424d13c9d930251 7c851cc018b385eaedcc09f5d6c7cb3c048c5745 5ca8658fecc9966c753bd079a7f20567c2e4b346 ca46b0a9a36783f57c7d3bc79443485bddc4c689 
13b8bcd85b5a5e1c625c740967b95a1b2f35a696 de4576b4c6b8a9268762c1985e79cad765be6bac 94c18b670927cd46409f3338c5f3644c7236cd65 98f6f909b80301d550b8556b78948c6b9d7f98a4 ce93dc7d3e7c9bda68c9e2f6ed43ad520f9ba43f 0dcb1e4e07a5f3a01fb99e5fb1baf92af0486fd3 4b3cfd4eb08690811eb2372923781400116fac3f 46c5eba36ca3f9175015dec666312de289ac3269 a8bec467fd1cc65771849eb3173f369dc8eff649 97899773fe409807da52c098e89ee2a7b34cdb6d b6d81e14350afae5dcaf58385f66e0042a199434 247afbc7c27a9d5cd071c016b14aabe5676cc27e 2e699c23850b44ca39d80974ef9977b1db626c40 515fc6a898d54d122e95cfd2b1189f64506fd20f 4993b47e69cc2c7098575fdce7230aad129e9e94 756a30f1c6f4450f7571b446f86f3b1eba7460b6 9a755fc75ad21220f21abd2fcd409e87b5b64819 0f628b038c106e45c6e9c94156cfef15b6c2aaaf 5478e5a82ea2824e42b274eb803ff2e6e0bf2d7d ef7e74cc8b69e0347478f048aa250ed975e65b3a 6b08b8f6d51064b88ccc345975a7fc1a527e281f 9fba4359ff258590afaf8239903aa1c9b5318371 ab9b73f6cca06540378e6f41efb533f34556d83f 0b0483c1e221dd06bd6b8416fff034ea5460ca2b 7d99ea7c1539cf03e64139f9f6d25968f1471032 77b87868ed75bdfbfdcf0259f24a8e45586d55bb be31f328207c1eb41f5ed2c5cabe768d97bdea7c 91305df6f81a4b24f225073253191b3bcf51ebf0 569a49bb8d3a85047daf46b1e14e238fc18a2ba2 70a5de814a7b7e113c80f98c7caf259879ef5114 34842026f721c7b7174b36d816d8e426626f237f cdbd0b3908bf52405a3f5eff75271f0adcc4d066 24e4df74ea8deca096f90970ee04193a405368ad e38d12f7cf16272d70d954b5bdb7b108fb5dfb74 8638a7cc9d46d6cf82dc1d06583938afe4a69c14 5aa16f136339fddb2f1e7f9cd1b5bfd00d033dd2 8a9f332ac0402962f1276def2bb71f540ac49400 814ef40f8767b2a3ca4ec2bb91a186df62e2abc7 852bb0c21fb792b8b2ebd65db9a0b6a0621a9ce8 a4c4ad68bb4ce742732da32266c8892762353985 f4e691228c7c52b60464b73c15be0a45932ac986 d2cfd53dc36e9da144065b1ee3ebe48709ebcee7 d6929bf4953fba4138129404cd37e94774adadee 54aab0e3c0dd29f80c7b91ff921c4da61c8a9ac0 ba1201b8d9e115c9dfef39b4108dcd88411e747b 7c6b689910ec97eb74fce47bc47473ee8886fa3a 145aa3e14098f8ea3c1bbf2491a262bf5f7ac691 9f592ea0b4f468e9e3fed2154b79736ff2aa4b2a 
72b2029069d3355e7ae578cc0093e07bf8d29147 dc71a9b104bbe7f93ea7d1affa510e3cf639c0f2 bbc3f55cc70bbc99933c85c1b6e2a8955e741adb cfa23f2bf7d1796f90d23ed432608f22b5296e39 63b091df705966dd833941ad404dcbb69d013b29 167b921e6a986d6b9f7fb8c348abd5583ff76dc0 5ce43e9e369d4898f8bbf87e4e4fdc8e1a65594b 526fd6b84781f6029583a19dcfb00968807dabe2 0b6039a650223302dac30e47747b4588ab6855f2 919cec6a316728bd0d3a5bc09511218feb537b0e 415e06f7a19f30a822b79bed7b462c9f05d26814 f6cd1934cd2d31fd77520218b73387e919838f8d 03664f14532f8d45e9fc959291485a16fc5dbbda 772a768a8e1e5bf363a789bd6e22e9bfa53596a3 fe6144f0b17e0ab408300102e9cd157c4b96306f ee9744d13ef63d78af13b11e27f4ddc7756e48bc aba5bc9c024d155669cf9c1e0b0645bdd88c1579 8fe580a8564b958df7622d8de412581488a08a7e b68726affa5b7d3d09429ee3cf7ec1991685fab0 45e75403476d79ea7c95de9e08d8285c9f92f824 f6df3854b70833b9c1a951100bb9e10f43710c2b 653c4d730acea9282e5e4f712642b8a1c46c0616 19e1689334dafdd3c3db52f532f643470747b201 43b92333eaa1727081c75796036545cf767ac71e a13ad6c1c552299d3c765dd7616d7ae1dc6c29d4 00e00ca1137b832e6ee0c843c38c2f410ad6e41d 300c8d37aa4bb3bccdba8d446d0d7286b6832cb8 d59fa10d57111a9b190c2a9b4419e6de74a48b99 6eb45f7f32f41f766c16b3b29e31b1fed54343cc 2f48d041e90fa07c1e42a1c84d83cd8a7dd09c15 7f6043e8788f924c774ba106ee67e27c580705b1 e4911481054860b12499b5c7cc4fbfe3e1374a1e 98060a04ace1823eae9cfc022840d7e2ec1cd999 1977527c5761f414fc28f8394847a2e289ca3a1e c45f2f28f97dd078fbb68a78bf8551f4c687ccd5 c10ea405a827c9d1178f1763f6cbfa3f5a114f23 9990c057fd8fc2361e7206ce3e4d15f739d73ba4 84b914bd67fa0cd2b71bcf662d48a7147f2d084c d6bed49bcef511e032d0d2e74b1ea7ca04ebb4b7 6d30374de92d39d5fd5da641e13131d5ff87adb9 8cbe3e2e2b2ab38c3d39698a33cef0e63710230e 500f1d0385d49cb6aa5333a4414002dc7b4bdfd3 954cea6d8e5b9ca2f8febbdd1885c2bb691ddc42 7ba253ed8f5c925283cf6d006b741f24f895a7ec db380918a40b1823356151a1fbdde04ec075a8fd c70ac8e345bd5674402a7d65903aebb74cd92146 aafedc0e2240637f413faf6e5a5a035b32c07b8d f6f4cf57dd983ec5a86330af5138598ec025036a 
20c94927d6c409167b32b0e177408b5cab7a518d b2a9ef43fcd0e5ee0cdf50e5bd6e2887378485cc 4f48300a6a688c5f7b49e4e9294904260647dd6b f5193074b4d1c93ff9b924f3192175957fb2fb1c 9760db2ab4e7126751e4aa9807c53191d1b61970 cd2c06f79f0cd7f629a7b23b5f0d39a43f6ff173 188843b08363e4a003e0c745de4b5ea29049d482 04bbd5eaf7140a52ecc86574622de91d50ea3ce1 4462bd13e76a0fa3e8d6ba1b0c9e8f188dd278a2 4fbea887b83ecf5f4c7c345348e308b0cf90a53d 7b365d61d202b3ab161fc0cd966682540724e26f 5f4e89f6ee63ed906419551585639e74390a138e 4e96a1de1c3949b00997e92667fbaa8cdde5e9c5 225585bd4b07bd1192051cb8cf0425899bd22054 68b1e5c5124a1cc2d8e262538445a585bd6a4ab0 e638a1c48c8b93ba48324afbd6b004294ea8ba0d 391f23ac50d2799ce979cfbe6cce9e29c35fb763 e5382bd9e9a369d5e55d763918385c22125c74dc c0c2f13fb4973ca724e1da615d52a23ac44bd574 2e44f610621e16e274173aeecbc2f4d850b91b81 ab728f5a0a9625b17556951248d7c847e087f887 c44bf173e41be8da833ce44cee538643fcd7953e 0c8707330d09cb816b48eff3380efcfdd27a15e8 ff53023092d2a89f6054981f814603b47e85e996 6cc4ce9b3b246877049313a019c5cd3fd3d97bf8 5b395c382aa802fa84a6749ec840430d4a3acc6b d210cced907e2a1e5b2367d1a74c6310fcbcd035 2ad018ad3d0b3d1c0c7cec9d01e2e6f22e81e1a5 138dbe222efa302ea1ba8f76f8f1cf982b615c2f 9dc5de8794e8224ee2866b3a4d06658e7680db31 abaf7b271c5f7ca2b5b90bbe54ee9eb7c89933d4 7a3ad000ee16437d2c9497b23871c8d02a6759b5 d5869574199e3ee48e6f3640bc74787d1408c50e 60cc0a991d55b592d0b14297e0203b47881dfa75 3e8a54e56aa73e2c0f731235d58b0dffd6d47610 e824de7d3b7bfe32fbd752a90580c36700ebc401 883e42b3dea865d66b8150c01b8dc9d326c62c96 eaa214671556c9ba54146d991d577f059387e324 6f0508168584ebfb7aacbc0b5801e18fe6dbb930 e1b9b7b758b787b7277740186f5b74b01a75c0b1 391b46f15346591459908ab69514003536b23347 dfc253b800284006e195bf2f6f3f8655618959b8 0b72fefdfeb4da6b9fa24ab88947661e68c0613b aac733641317e66f9feabefac177e2d646f1334f 62629f2d3f6addbd8b1ebe3eb0dc2ddd13e6ff9c 99f5cdff2b58eab4e77955b3be109499b4a7950c ac24e33f43ab0a28803911e22a3de1c29039133c 6605c597bd5b38dcd9c13fd68c54e14aa08ee8ce 
6f51678aadaffe99a8d4d6e176c1d2dee94b7983 635949147f4badaa4d689988f972dd9b3a25ef39 38fc69c10c9bbd50f8a3db66b4a647938697018b 3e9e2aedd56303744ce8b63959f4d41369c55e44 27a0cf893d36b092a91321f3c6ad4d5f03986181 a126a650aa2a3f9d63dd2292a77454af4ca64769 1390b9225f3ec4312ad4a9921cbef3eb63945032 9e270796d16dc9651470a7ebe9087ac1b1a7fdce 2008fbbd5ba705cd999ea01e3ad3277e44099060 3aac165a3a9bbb783a136e971191a1e12ddbeb46 f1a81b215bd433ebd667b9a39850ded9c315c810 8cf8ea9612b8ddd23896920e1e9d7310017c9a6c 2f1579e92e2f40f386655b9c1693dd62e2cc3932 3ff9af6f327822fbb0348a217ec0512218f22912 f6d7bc9855459d8448d05d29b0467a905bb2f798 1bafc48511faefde2090cc57816782c34c43e12c 69cbc81e5ca0b31da0575d88a0082833ff84012e 7922bd01d8c244f38dbe356cb378d791568546a0 50cb12bf5c06ebf78f11ea94ea831eb22893f336 0e165cab73b189b0bae6788b40ed9b67ec44cf3d 289ec510884c156b7d090afd289a9489baa9ad60 64ce790f7d670c9d7ab9f9b4892ef0d87fd89399 c213085952bbf7c7c6f9c81c85483d8b1b1f0bbf 96fcc95da147db4364b35534fa68e83efb94072a dd028e58c0744ca9e2a113a18b5eca7d4838ddb9 366ca123e94106752fbecfbbed4b60adf5146d20 f1e188c39782eb7faff6c66376e5b277723967ff 1a08ce11c9320afdb0b72a823c90e40dc07a3e56 8eee0f17ba4862e87547ee98a5f0685a72d03472 15c295466dec3764594691dad36635d04ce770dd b33f042ea20026d47fe598f7b38c18dc567c7abe 455c6eabb35f9e274d15575fa18a39f9cf0a51ae e7a783508bccd1cacf222398f74a4af3cd12d110 b63fa8c82a5ca3bf9f591ac4ecc0e6eb77c48a1d f39aac876d0e2b4e676d332431cb18fc04bac924 647c667c3fe80e299c428a3468b829b29de5b575 41961046546602ab24da6d4bc72fb096991c08fa 233dc24e3deeacc6856e6a4dd5409f568c57f18f a1d50e9b90b5daea519b7857218850d71af4e03c 23c993c61df1b2a41ef1836b94e471cb7e801549 07eb671bc9a4a56125e20b7b09cfd9356f5554f7 9359a47cc0cb29d343ec3ccc857ac0fe90cbbcb1 941997d81bfbe1f6637bd4c45d0c3a2735d51113 2510a0542055b8052590d18bfa774f0560f6c957 c739366d7395108a2a3ad9dd49bbef3bd5001b1d a9234190e9b6e057cfbc61fa9d63c031d8f6f972 14571cced2b15ac649a32ed8e0c62e9e17d4d53b e88195fbf189b8f391a67a3fa237c0403d00364a 
1d604e2890bb0f20f067bc82c0d6cc40f13e2c02 89a2c3ee44c331d007ca57940b35ffb397100df4 32731d6a0297552310131664388172d39a3282ab 836d31b746fee6f4034414f11947ea31f18e2b81 866d20bb6e79c8b6fa25e48682cd2e91aa6eeb79 bef6bfca21403c64946ef7d5185916e38452b630 de1e7b0654bb40ba9b0f560f4acea870af7c2feb 27de62a78e2959541b12cee2431c3d2e8a00f1e5 476f51c4b8a8ad049fa5b2cdbacadc5471a53afd ca529ef3902bbabf39a335d55a7e4c12b826b79e 3d58731660c6c64ed6a9182247ea75d281d048af d2704bd1e4aa11d87757e24d137b7036bb97ced4 098fd94a0396a63e3e3d69866322eb4d514841fe 289ab6b87cda697990866fe9159ba3d9c1f1d98b 0660a2f78f5c74397085784e08c4d1b418332a44 8bb307791da86d5c35eed73c8442abf339628af6 77d5cfc7eaeae036019bab3917fbcebf0137d5d5 8eaf85c08ecf39231f6f5e67b7233153882b821e 5e8bd6a46c92e1a9fcc8f6ef869bbb26852f42c2 180d2e3106602ead537f077eead3d875fd6d3f7b ce1861e244edaa328e961335c5efa516beedcacf b7cfa3d148ba21224c21e983c8e358b528dd2667 49a3385a09aea1359de8f68329063f52357ee97c 3b980dda5cd3b16948e791ff2cefceda36650bf4 2178355952ee30cada8588883d74426a450075c6 827ef16fb66cf8ff0103e5a3c1b041ed8796f2f4 7adffebb46843588f45bf11c5416a08c2399c4dd ca9d748276edc4921af6fd9325c87409c2eb2656 78a8a62c703475d8887e511fe0bf827813fc4af9 15f5cd5f601a3da5dd6b06bb315c57c4596dc912 a0268e14074114ccf1cc31d869a90dfea808a351 fcc3ad186ebdf9c96434da487d93ba052b187944 ca106821e6a48de14fa76fbfecbf335b87ccb01b 12eebfa8dbeb421d1831a2472fe061c9de75ded5 d568f82932edf24107218b63c7debdad11fe5a58 beae4e63176d45027dd6e966039880807139a913 2a794f8ef9333dca8c21286585088b73ce665224 947f1c639d30c9adefc5b60c29eb71d52cc74c04 a1ebb191e7da4e401286f3e237818e319f39ec97 82f65f824875bb3f6dbd614f06b1020d4e8f9a93 a7af14d8c420af5df3429cf06b841ad50034c17a ec7d1815b9c9d0098b2274339f0d977bdf2400e8 2ec0f8980c7941aa77685e8aef557bccf5a7360c dbd253ffcb7ce4e7a1a6a49eac3d6204408aa54f 35133cef13543bdf0f5af8c7d4f3ae9408f3a984 6c3429f7bacd8d0f9e7102d74b3cbbfff3cf4010 dcc66821c49394f2795a1769ae65946a831b3acd 206d803afaa9dfed000f7dfbe454bc15d4dc7d44 
0841a458514b7a6326634d755b4f5229d66bb5b9 9029c6f0450f53dc5e67d08bce46fe82fd731e41 a3fa08a08e2f7f3ae4841bd9085c78c6a3a365f1 801abead76d2d91de8dfed6ae0cef62b354be72c 811f068935c16c854d15a2b4859085be698d8b61 a230c2e672539a25e81953f88bf6d478ccea94b0 dd06cd0cb5cfb06383a38e16c4b420e8f4e4c9e0 8e27c7deb0d18b65eda35a9c066636e283fb6a80 a3e643c2bab1cafb72fd8ef5280eb91dacf0c699 9fae1ecd33bddfba0886c3b0568bbc6e2c9342d0 6d9ac60daad85619879631e290576503292c6c94 f863550537932ce843ca8ef9f96b799313ac6b1e 0d4666a309e65a3ea03a224aeffa9c640e2b04ec 674604729dcfbfacc14f6d6b286d48dcc0444660 cfa6e0194000fb8c9ef0435233fe964f79b28ecf 352d6b25e5e21b1e110c85eb98871bd1067cb8cc 3b11206e412c8638694eca5f6b8b7569048513d0 0e74cd81ef9b20ba55d5584a1fd8126bcbfd9a1b b642403afbf59f041970d1559d2edcb30fe23f73 1bdcf7a966a423fd3a369a41cc0a16f6c9c48848 0493f1b510f5cba348efa59de6d9f1b2971b88e9 a3c470b761d51413ff0b673d8839d6aa94e491b3 dd622c442a1cadbac3294fd5cc75f45afa7bcea1 3e665a4849b4f54372cbd6935db4d414ab1aa559 b1029bc77a901342e03aa24f9761b9fd1c491137 8f602c308e0d210dc293ffd6224dda34f64be426 e6804acb2121b39e5aa10f5e09ed1488f6e000d1 9edbfa594bc969315bd4ad5d5934429c874828fd 9e35617cd9d6fac34e2c9fc7306c987134895338 0750adf8a20aab19c65ace0aaa28fc63504fb965 5e6400fa3e94f55a91650d4fd7e4a0444ed43e88 bb77ebc8bdf0611c05533e38c10d860c28827f4d 7656d7568acec620a983c9a4766ee818d32eca96 18902cf85170d43fb3dc6f0c112a721cdcdd5f94 1fef74fee229f60a2c6057abfd85d8c5418e205c 7bdd6a74d650d8ecdf0c2074365376e379f6a624 2bdfb57869c04a7a300b19dfcc5d2932bcfd628c 7de60ba7ad55cd4934cd6ca4c5c186ba99e8b095 243c222ccdf1019a5d2a6b5323bdc235e83e6ca4 15e07e8548354645aa830c4e60122e0bd4fb5c85 3a0b571d7b7b583ca2170f99cd6d0c1abb564584 3c8fd0221b1b28f4dbc4881fc3f02c75881e886d dd46d1a8de421d35953c857baed958348d752505 2150e9a9db0d3c265243a8fa57609f71deb38b8a 4ca039e4e3017d03c6fe2cd8d6ff8b2c30689f4d 1e20fd3d910e3d42d029e02c562cac91291d9947 7a41a0741a9512b6a62040b3f27933666bd0c002 6f8104a9669e3e4d1a5613b451c5741da7f658b3 
1b6754826c2605e2d40958848f7dc7ccfd09d907 c0e9b9c7817f83e3b0f655d2db8f26d9cc1a5d13 fd0eeef3cb817a539ee10e4956d47cbbf7818a5a da7506e79208481e493374506d9d8d2b4e45c15b 99d2e96fdad7426045a65f7e7f480eb080ddf6dc 5144d15ce32476e33ff2942383100812bd62c7e5 92d3d11904a46e679e439db077a8df4b3ee3454d 4337d1b44de59803057e5cbdbd60d3869d7779f0 3d71c76b7a1b2ed03b72f4a27e2bebc2a20589c3 7b13e4464691bbcdcf5a6b14bf44a7e3a6ad2c9c 8ea00217ed5cf923851e062106f210fe7edad586 51544094941a9a016aefc3ba185a63560d09db2c 088a83e5c595e7dc9c3f995606ea04966ad04cfe 72aba5f19ec0be6a73ba550457e6723d0be2b6bf 649126bdc5a2317a1cc4933aa33d253cae3098db 6858c06e909f4905cd8ee264c7b5e39befc56547 8d04580ecbe545410dd477fe0ae86e83e236484a f600c607d3ab86eaad034ddaa9c999ff54a75c07 c3dfe014b32215625a4ad46bdd905e687c4735f8 4f595071b95c497255b03d9f9fb87d7c1ae14c25 9b038d34faa2925cd079120c8de4452156f2947c 03cbe122f3e2d031b6bfcfa2baa47b00d9ed6518 dbcad94d43483385ac1adc4d7a2e67ec491b6e67 5eee6a9320d3e14a95f0f27152e07be7a83f6996 e683b06fc5bce2568ca361d8be0e39f64164220d c2b27b1326d0c6f541fb3f622c804c443527b81b a1d2659631e9dec67bf4b4007bb0be332c146948 cab6674a42bcd90d23355e7a22b049db8fda0c4c 28e1af8be1a0ed65edd7f26129b404bd50d83d87 9d56b27e08b1ee0918af4ac4af8c6e511ac74b72 6385251a0986866263c1d7167ff2f0aa1173d101 2dce39eb9d0ec0fbfd0bac633d6450f4e13a4537 152f3160478e817720079f35b599070edd958d41 45e10b450fd93064d971eeea77e9db5369cf240a 03d55fd7c1e283ce8ca4f5d86fc231c9182877ff ade8d49cd008a0d48f9a79f969c8c34c3dd35dc9 34f8cfbdd65301cb6964e7dfe2fcf00c6e357a4b 90c4467328eb4f1b87191716ab49315b3908e4f8 e86be6450fb7fb5af0314576e43680fafb526705 dd9f60c8f45d0e61604076f964dab68846ce146b 1673fee15413a814a4200a43a5621e6a143dcd13 7f642676c72c4e8a30b14c67f620ccb9a74072df 6595f362ca2d88b8791209fdd9773e01896cab4b be04a10c497776ad91cf29f2242f9fc0322e4950 c7aaa2765406d9f0613ba444f52cf305a34dda3c 70622710809d505558b055ccb005d99e16668b6a dec419003a441f944fb3c24381f32d6964318a5d 77939b7a690554868ebbc6ac59b8def42af55b43 
2ff653ecd70809368e04059628618ccfa501385c 1d4d848fe899f07c1ccb269830d6613dbcb949b2 3e2399a7b1786bbcabca1dd9875bd8206c0f36ed c2adc874957d995441e9e16ebdc4997878e73d05 834f6c127311d2d6cdacbbc32ced73eab56b9f94 fa5ac5f4a726becbefa7536707e2eb6d1025ca8f 4fc40415b45e98b4540f3e6df38cfc85daa5753e 28c774a4f917d20ccb75b52088bde92b7f000d06 09f9e862828d9d1915565c63466ca8ae631377db bf93371fbcf0a27d63678c6b602af6dd152df12c 45266a06390db6d8eac7c0a49c19db9c04b10504 8c2e5925ddd3a0db0fcf584edcde5d163bd4739e 09e59d5bd18008ce4acc3c183c7c2a12b35f71fd f907c695cfa0706329230e3addec8bcae49e32c7 f1fb03e25d4dfbd765f897bc14290bd8d4c94acd 2126ebec7bed1370bbe5917dac81c03b9d067114 444bddcae36c31c019a55d8b4d17b2244cc96947 64a3b487c3c29e7e04b122d2e82fd6eaebdf296b 30ba4b8e20649de2d965700a64f8eff9dc22ce24 b5260350ec5fe53733ea772f5ae8b2c793529971 cab3e43480860da9f75dc4e04a57b5171d4377f6 7e3121eb579febd2e9312acc4c590344ab6009a5 4fc4ffa8a2be9089e6c8dbcc1c1e34ec25833b13 0b1f25e7207d9a3b29572609c54109cce14e23c5 b036d9f68c783517024f0c0b840b4941bb41b396 8704d70968ead57bdf919ef9eb6d6ef693c2fe7e fa9c9dc067a9ec60abb85d688f1b05c70e3a7f92 613ca5191e7e3bbe83adf34e9f4aff8d9a23ac9a 95469b1131fb86a3b0c4c9554afb8d8cfcff4446 07ba7bc702538c197d9c9e03603f1a35917ffdce 2f9b762a1bb694518edf0d2e3cfa083e2c79543f 27c67b8577f5534d3a826421346a079b7a18ba56 efa28852aca85ac04ae5cc7df047644d0feb2503 4b931db9a3fe47fd67b020e393205f617b2c927e 4776d701a723a95b206e15fa5f6ab6a27b2ab20b 8e3ae3f4ed4af25aa9f86f26daa5c4b0fdd578c9 00bb9a236d9254f0670df26dfb30923556b9cf80 256b702ef9c5303e275d896a5fbcef7de9bc240c 004e98d58e6143c4e7c11daff6bf1270b8a9669a 11f59a9867deb3f5ef9aabde46ce45753d2dfa08 e22e4efd20ff8618706db8aad118ec04350ad878 611d04cea787e030294a914a02ae49db5f76510d e848d67f5951f618792aa1493e9e6632912159fd f5b5e66da1d3d51aa6107525134852ce7836f65b 5069399141c809deb0c147689ec2b998f8860091 a07b45a0c30f11ac1d7dc294b55a20dd39a1a242 6b76bca485df807a0d9d639288ae50f53bd640af db123f6d44d4776641fb2caf522c72da751c6ace 
7f2222381fd09603f592431bb5061e8706db3fdf ccaa5561a594f681e5dc3c023a802f1b34bbba39 00e59d57d74986e32b1e2454175fc7a830105caf 66d5d0119b601a72cf3833809af8ea5c1949f9aa a5964e7824edf56ca1e8cbc86d7a01d73d836f7c 21be0d17fa7c03645b42d813f11bc48e27cf23a3 0aeff3f2c88bbec1b102e75168465f9c9f547d96 ddfdfc136379e8816327c32c06f9d9bbdc239e8b 5e6f8c0a79fc76c5c66fe51bf044362e721023d1 41b3d0e693443fe9272e833425d5edcfa2c0ea2e 6ec1661f4a879e633eeeec3642207fa9f8d6b93a 1ef8ccf829bc201dfe90f33c0df2637b20ce6051 89dab8e115c3e8ee46a23a855f977af2408ac295 3dc7b7f69b93c41c30fe6f66aae2ef045adc2a15 7a85b8405699ef271856428645b948608a6dbeab c39be189e8013a7d9367e15a9b1357b069b4a29e d4ce0b40499d9f682246509d6c05fd88155b0fab 95a7505a5e2c19334677143feed22d81c94fa2f9 c42a5d5595e5ee35d130ec3f878b4a00291516da b5af0d4f04fd4b5340847dabb796ef8b72fc00e9 58d3d20f8fad55c9b922772e4509a900cc7ac75a dca8995d8e510392e2f27a43b1ad8fc6af8997f1 94345a30ace75e8315e38c489d9e5fb21dd9b54d e946839c4aef58c75e8ec98b602b78f947fab7dc 31302aed75de32b7c54fab4b634850765a8e6fff 03975310adc13cefb0b53069cac513e9fc63661f 8b9f1cdacb691f875bb500ee679cb8aa50e7a59e e3709dd258d4e3f3553ec31897c6bbe2c89911e5 5da7381e1d3a204c57d55477fc57c71faa023c6e f69428067bced6722c6c43f6844af27ad60eec78 df019256d6670ba9de12e510164fe8784a53dfb8 76c1037a102d5f397605e4ec14ec04036a96a36d d65478f80fe66cbfb1679ea9342254d088e9a32c 3b8679a51743d879158f5e812b5fbb3aaffde59c dbfe4aea1c737d9f4fed071ac1532af391ccfad6 88d95bfd6171c3ee81bc5e9580a9aee8780f0ae0 baf415aa8ee3eddde487b570ba4ebb1d1cd1230e d6064639f507c4db69bb1ca59bdd1ef21b324683 43db01c819785706c37a6a28e94567345bf086b9 65886b036f84d7b3b9164b86b1eeebe577672485 a312888079b221e9b102330041fc383fafc8740c 0fa2bd4afaa6ebac39c8ee8c734e77ce6ef671a7 80c88735350d4e0363d4d444dec682602047cae4 2303737a4806f17e69bf3c34a71f105b9d451eae 34ebd38b08a3d7bb2daa27385aa081cfbad0add8 ff716fd3416922efb3d5657d11040aa0da167afb 3bcd8cac01beebb0c09fc7cf1827487f3ad27488 2431722ffb5eb16f0aa4c29cc15ca99f556503ee 
96386be728d72af1d32b02fadce97df2d6020a04 d57e9e9a6e83b90883ca3b94294abe44c2129da9 f628338e5df46cbfed5739f402d5a15792c0a723 cd28a0bc63b36037646448142f1c8a6d1e0eec6e e27a414fbea12832c343f94ca123813e47a8e602 7f1eb78181da90fa8bf148daa6210f7bc59b8d48 7fa26795cb11d453b7f45eb891727883146fbfe9 777f64410d5e455ac51ccfd672d31656b6d935d6 8d6a199f92f4a1f3f3ba5243266d4768cfe39545 da709487224439e3f6860a4721b225c883f29456 67dd7215d7fcd6bd7cdfcf4f7a33fa5d2a0d9617 3357292951eab32257363ef400ef4e6cef8514bf 05a9feace8610dd69e8401715fd24f9644b467ce 44ec501a2bf14743e7ec196878fcf105770109a1 334819a236c870f4ade003370943c10c52e7ef62 55afee36636da7f4cd251802a5af73465e559ef0 43a2f583684921cb4f9f57953fb7645004b3aead c21266e5b2fe7cee03330a6ed5f59040e904a5d4 d0050ca203c028f1a2eca3835d4e9c5c08132c36 ef6eda3ccbdc306f1b969849a336c0ea10dce7e8 e83952f9afbedd2d92b737cb80460c5e633105e8 c831d7b512c93e4c3f59f0100c5b1d656070aee5 210a37854d9cfd748e89c6f865d3aaa00f6cbbc8 46286a023e928e838728f026e52d653271ada010 bff42e1d843b2affc2492e66b104ebdfdad08fd3 c3cb4431103c9ed94b0f159184164bfdc7ed1fdb e0d390b464fb4fc16f7f65969e38c1ced6485d08 25f2ee72d18e55d7b0b4009708b5642523004020 87a883881ecbc736ef82a735b049cdf64c1de031 85297661545696085e2d173475415252dfb1b2ed e2cb23a140d9ad0de7a6089ff2e403dfc289bbfe 5c849b363c41d36bd3f9ed0ead1d6caf4a15d1c9 8764ccd917dc6d2d8136352522fcf73cade5bf05 08a9d9c3c6338be9c9a3320003b1b635fa4cf8a2 88ceefbccfb3ecfb1703af4791c6887298e2c25d 4bf70ef0883009fda549655899151ff76fef5d90 d5037fa40ea8b11592d3bcaa72906126f29a6d9f 9eb237ae38093b5b454d939e29234781cdc13d2e 791d8bb406b7c51dd5827cc66017f81185ca784c dc22ab335c085f1643cf25f9d175967fac40f2cf 04c6066df83f77bbfac5ece148d25cda5c87a83d bdf7e362e345e7a53137b61e8a3d33fb95487789 2e482c5fb25963459af8c6e1b1ad116336eea873 a72650922e1deb5a60b5b0519f8802a5b00c877b cdcaed4d42741c5fcc6c126521d4c85d47ccfc33 92157f2023a250d952a86efa30421390497370fd 6f4d35bd81f43e53cc1219fd3ed4e2de03cd80e5 42a9cf9fa7897354c139bae465a9f96e9fec6591 
10c77c472e79c4c92568c758beb1d83952ff57cc 55d2f6da50958ae40176fd18878d7a6aba5287d8 f20cbc9061e79976bc18a47bf07a01e01b4f49b1 7066f29ccca026932ef0e9ce3beebbfddafd2e58 7403b57b7b49557a7563c6c0960e47658d0f4a68 7c2cea866be67a6d63e19dbcc449fe9312e2f724 fd5afd7ccc1ea46e166e26b82065e3392ae28fac 9547b13b05c5137e18c4a433bc153eafe42694e5 b0d78134964fca0ff03ac434771fa0e4c669fb06 0d46d56e18000f6f41afa1daf5775117c50ace11 728cf48529252032e244d6a6aaa6ab612f339b60 75dfd983e1f11468f70bfa2e5c4e2b061a8f3bd6 ca076badf4cfa4a30cd4eaa4229f4d1ec0e13d34 b741992d89d1eae7717acbe00310e119d1f2a54d b26b78e8a3038b362c1b2f6c85c1036cecffa94d d1ce2d4b1b6ac7759c625dfbfa6cba7fc5984736 e4e8c9dc5b893f29ed8a4fb10fc44291d5da5f4e 22359888747b76c18d06e33ce3449e78433430f9 eecc3b19241d814de92e3f260d596d9a88e082d9 a21ff3d88f36f493ab30d54154db35267d11cfd6 07e7808763a66dd3c66e7f29dd41fbb8e5e52184 ec0645e7c391bc20df24fdb84c2c2a59d20dda70 3a8de6bf0ec93a65a436fb66cee26ded9908e833 4ac4119ee6d810d9f3c00647079c26328a02e0b2 bf6bbb644ac07efd0075f121b30956fea488e6fa cb884e519dd366e5a60dadebd1bf4d59e0dc4e4c 690a215168d7190ce81740e7e1d881b4497f2896 84e852c0d26d42db65dfc5f1257d39829db30bbb ba61645f87a415d156003ee95bc4e253c68d22f5 05b9d3c2a18125547ccf4f873d2ce6b683ff8af0 3c8ed8b77c25f2e06ab01a6d7f1f0c1c2cda2379 af700ae33e1b542a5736f45f175e8ba33a42972e e11fa4f2d842976a93b25b8e8488da6c9b7f1437 0a95b39cd244048b9b859a72b32c88cc51c21d9f 8d6ec19bd7a1cdd506eb16bda7ca43b2df93fb2a 8b3f3d3cfc788d71258972a7d94ad64c88173866 7c5cee2703a4a5865482d4d70cf304850d0c3727 43b911f247fb2b96148e06adbb2c2737af5a9748 00563ca6f10c709072b3e1abe4e0247056043fd9 181ee5216e7afa53ab4240e460a933eec2ad4489 cb8e30ae17b31597f253b95e8481d33d2cb32b91 150baa15a6e71d266f7911c6629073d4c0fd65ae e3acd1dd775ba3b428501a7e58d544a3cc28202c 15a396c9263e1f318e27eacda23c5daf9ca2489f d9158b505eadddee855464813148c57063e1dbf4 a8536b787c52af81a96d2a97237f1f5de87631c5 939a6c3035130a95cfdb12c0c8947460c0a97777 49c396d34888ab63d6c0125d278f0f31503cfdd1 
b0d4f0762ae7878109fa6f9c3aff5a4c10ed98e0 05a1b84e67cb3ab16d46ad642c6085f1827d44f8 d662ab961bea61ac8fdf98df8220335e55e62529 d9f7bc3182e7573312b9ef9149c607ea81e2eb0c eeaf154fcfc198d0a2e39f718bae627136e5cc53 11aa2f9c1c4eb1612c3b84d0c23739d8fa0bf246 16f83998d01603095e30ec6ca7445b6040b9cdfd e6db81ea6310e470ff8dfdbce1274fb8107d1435 50e5483c18f454f731558190a3d90cfe8340afc8 b60b484aed72208a1d5b36d1f7b8e25dc94d4c7d 74b30b703ec4b25bfd31a2d79732212a45ec1567 a459e0ef83fe1b49382be05ed0fff373b005b5d5 beb644f7be38329bff8bc360be4f1ea96a0732e5 201655de4219a454de569f66367488cb19ebbe30 c56a7d79408f3ecdf50c29b783eaa98c7c22c83b cc78b0361288dfdf9784ed8573ff3f9dc8bf5fa1 d7477e16d4d4ca41525d85dc76a7376b8b3c5704 3ea8cdc69236c0a42ce96246e0f39f0909154211 d2323f903ef24c14395ec1ee5e1c36aa3722fb30 4d4d0e5de17f10ae33a0d564c44cab0d6706e7f6 1a7b5ec4c549b377d2dc85193552eccd589e47f8 17509ef4d7813a9ed50c741c8dcf1d804d2de281 525029b4a442710874a8e06fbfea3167e40e0e9e 59f67caffc23143d96d7996def7ac4cf278728d3 99ca45f8f4da57a574b1c4e3330ea4a3812ee2b8 4b4264eb7979ec03dd746f2c676535ac72109ace b273f4818143d92eef84b44b1f9bf3f6a0c1f60e 4ab198bfca1d488f9bb6d10fafb11c62aea76ba7 a7217ea73c98e24739f89a1241551ba1a12ecc3b 310f640a5b008ef2eb814f0c1c6271896d34c1c9 d373d0f6ac93b9c24c5eeb1823743551f3441206 bcaabd91055039eae150ccb8368f35a0380d7492 10fd3ec7afebfcfd4ba59faec1f6ae67f5c0c83d 3a8f83e9765e4412b227ee83f40fa980ffc6b852 b59942ddf2a31d47f24c5bbaf6a823887a8eb1a9 af69fb28550e7a0a938dd1285e8909aea1648c0d f70b08dca989727dedfac3d2cdb7c87e39679cb8 0d2aab190e09a6ba056468517056558fba5f1f5e 22f9f9d0b3011ab788def6f1d84d5f292bbdc533 a497680f42ea4dd915e07a1b20664b9ce13c0242 868f3e518f29c2868c047612ffd789d35ac4a0c1 160a21fcd040f545f0afacfed56f750634e9d496 22d69d161923e553a3ff11e7f544bfca258e5d12 726262e0761bd6e7e3de20d0d7ae05ae1e8d3eb5 a1295c19a9ad88864bd333324fc3b0760ca7ca33 9eae380390349d4755068a8b37a2833864481e19 a91cbdd10474d1f1d42469e27e11b0ad385448f6 926512341026d279f45b3c553e7ed4e85ce4c69a 
e271d6c52fda8a2dbb9b632d135bb34e5029cb75 1a48496f466b3cb51057be0c553d7d3bd4f18a7f f4e8fe0b477e0b11cdb3a1dddfe34dfe50de8ad1 e05d35bb728f5e06203682e56c012a32ca53d5d0 cf9c8fddbb4cbebc0e5b6a07b1088d5457df8400 88a7582c5504743f4034bb4d77f1fcf3f625275e 49020caa466bb4f3b143582640c2e0343554fa23 68be1d26debaa6488aae77c0b25a41c0f6e37ff2 e2c84db24f2c0e7bf325dbb33d07d8918de676d5 25456bf30f3c1ab875450cdf82421679cba7ce0a e180cf4ad67b749ef04a199baf5308816d8552ff b2b64cdc99f4791b9cf970b8d9f7c40c5e515ad2 d98f8d14e21596400806ba12a3003d20bf70f00d 8a606cc863b6cf65b50d65f3f8bf6a0cbccb1030 d5b5314525ac57c44578c792db97e1d41ff70f28 b90482202fbe4792b507d5998f7b54df65246dc8 37d655f35f4a836203db0e757f3bee35428d0c7e 5ff1ad3760923ca1b128ddebb5211a7258ef1ac6 c7b49a3bcd9baf5e7f5ce14fa326a32253e876b3 92507d118ac682b48bc6dd4ddc439b050b499341 7075d560c0210469f41c1c9f9ff4568a84253e3f 3249cdf9f1707fbc5e9b9a7963892c64d9ec2cb2 3b3505bfd16a7e09894c37f8c248e9b1fa254726 10278e95cad8f2d15f2d1ea13add37bf25d5a7fa 1cabe83b12e1ac6fc9c436a872d9367ed5f75d10 6d1d236565059ac1498c919c48509815c44a1f56 1c7d82cdadce20bc9d00b5f3bceb4754d42e0b4c 78b0f46987d1cbbbfed2d114704358cb12fcdb57 a206f84176e40ae396058351ae4b2c5795a70ece 5a3d0bcb29ae827d41133eb79dc95c334bf7974f d5184b2f5effa7577c9d73e443d8ffc9f72a55a0 4ade51bfa869e63a866819d36176d9249a8c0d6d b685ebd229ca1518eacf0b4a3830687fa7a1704d 6e0c0ec263eeea699e26989fbec0e4d0c9046805 c61bdae7c51dd7a4dbf5147297176c2b7d5832c3 154b8b995814ed840f20f67be104f50fc720b2e0 deba8779f453e7806ecfb4418df26ed2c30e8da9 14bc2372caa43e6de4bbb9906df4dd3ff998b3de 62d002102c6ebb2bfe94518dbdc7673476720933 cbdfbcca476c1318c0ffc70f2b98a421ce42bba4 fe67caaa83677b66f5031951f3c3cdbcd7a67239 972c72d75c3acbeee1be8d8c28962e1cc9a130ae 200fdc6d1d2a7b8c96237c0e6bdfc8d66867e640 65e6ccffcfc8e90b7acc1cd6cba1093005992aa1 465a5cfab80f539c69b1f58e4aabd8ce6e22c122 f83612ac31145b977cd7d39194dc88a59ba9cbb3 7516fef3fbbf42b3319ef1bce134596c18aed133 841b24b37c067cf78824cb463b10c8d11665bb4c} 
7f57708da22668e7570629698777e229d5cf8039 060c3831efbc1181e71e118889887d07b90e3dc9 {507274742c7204915f6927c8f5659bf41e7451f7 e01bda3978701ebb57ae457268ee57b219309f4a c0cad5f0e4f100171a0c854be78199be240d4ed4 060c3831efbc1181e71e118889887d07b90e3dc9} 060c3831efbc1181e71e118889887d07b90e3dc9 0d963d34727b66d4a95861d2d383ec212e1156c2 {0a07ece1c633bf616a5edda8febb562261c2193e da138db84eb792c36af1779649c3670744a76370 7e246a71fb279868b8ff9e2138887a4cce2d11a7 d61ce57e1e353c2de8c45344cf7fc483c0a674d4 d41f66f18ae2ba86d66e8bc0341e4073b04d9948 5c6620c8ea07b1cd9eed7dffec0e8748ad3fd92f 5e8e155f9607693335e9ef6ec72b3acc9216be26 77ebeb82f828851d0b33fb18f90a486ff2b62411 58aa10258b47436a11c1048366213661f3075088 d1f59f123f5b755aa658c6260151067a56bc0ac2 0481f50c9d234de822db05f5e95cf395e3bd774e 3cc679ce3f7b8d989875a64cfe91e5a2acbdbf38 0b3d32fc2a3afae0f063a38013f5c5298804edb7 705cc9d07898c828b7a8c9351c57c0b58440e9c3 c10d8ec63e2b02bfbbab54f92f06f122bee3e347 c144ed7c11d142c71495a88b0f7627171f01c8d8 05aed790f2c88849b1b1c46a38397f6266af0f09 e215a5288ad6f988db0deb73a55b297adc4ffa97 cafbc6814c68fca829503dd6e2932ed143e22b32 880b784096938c5ee79e789f4b7862bf8b0f82bc b6f290b364f93381ec162bf1c664049fa1b6e704 45e908fde2a2e998e77487bddffed133abd0db1d 2412d36a91f517800cb31a1fe2a5f48d9b088789 06c6fa8419d631ca17b14d1df50fa2136500b3cc 26c3ea81ffb1f2fb17cbf1830e6270b3a3375541 a9e4be2a73bdc73797dfd46ea5024c106ee50ff4 cc31ad4108833850c3dcee629ab51096091f8bd0 dbaccf136df23afa0a8b7cf2e75ffc2b100d62f7 2854727694b875589c71d04027fb063c747069b6 3b1e0beee932797f8f5e16f5792ef7b6d73e0759 1ead1fd3de03b002e02d866ccb13131552ec3048 053f31cade373dc21db7374940bf3730d70342c5 afb0e274ecf3b02049cfc97bbe72ff00482e7c7d 52b13d2d002c6d47c450cb59da3df11afbef8e20 1d7f8e61903c5e206ee5fb455aa20af160a3f565 ed10222ef58ba403137c370d541254efaaac545d f9783d3ee4a8c962d55aa37f297fb7ab41acfe2c 505f339c6d9ccee05e1b8124fd6bbaf3ca3d89e0 4fc90ce1a59f5ce71acbf5d54cd926859cbe5f4d c1df4c1ce1b1cb1d4c6befb16fdccb52b9db78b5 
2065a27663d2e19e8afa0d348507231c13d9782a 808e70c34ddf13843ccb3685f606f0097d4ca092 7f083f359fba4a7e1ed67d0559dbb099d761c596 d2fe83f48f62a590ceaef8d78067c22d771b2e7a a1c12e9af4a5812889b32dc71689058a1487ba35 ea79bf6cd7a3ecb9151ee4684e1b88f00e9ab2fa ce155a1fc98e0823f7387fb638f4b5978b5806db bc2364ddf1d4ccea6d8a1eebc94c3f058aee561d ff551a315a37d2bb1c031a3f8b6e0f63db86f005 6ba7ca0c411ee0335578e9fb9e180e560923a5df 19062b3707eddc92364a6956b37dd0a2d3e8766f 377d20d3e117e13e7f37abf53283e3a1f22aacb3 76b89d1fae48df5fac5b50ab74aef48d5dca1de5 3cc3c5dcc4ca3a902e200f92f138ed2dfc89725a 1a8ec7eb5c49f6d65109a78d726d8cc0ad6e4806 198194680645b30b1ebdd3463076312126e6a31e 9eb970285ce7ddf5e289496bb343f63441ebc230 b65297cac6c2604ca0301c9ea9c4aecefc5e78a2 c71481381655d329eec218d3f89ed35c1877cabf 2faa88d9e9b5dba9da8371c43be5abe67100e10d 4df590c5e59607a46b33efa0de7d0a5659e81d86 0d963d34727b66d4a95861d2d383ec212e1156c2} ff7cf18eb9f53ca0007667716a08957bbbe9edd7 b3ea1c252943169c9d86f2320816753d718717a4 b3ea1c252943169c9d86f2320816753d718717a4 b8bd255470868e7aaffd25dff406edb34e409926 b3ea1c252943169c9d86f2320816753d718717a4 b3ea1c252943169c9d86f2320816753d718717a4 b3ea1c252943169c9d86f2320816753d718717a4 a14502015ec444b4c133b6b410c9a2fd376d66f9 {30d5b190fad1cc7d3a1547002d2f8849af2657f3 92cd2dedfc6d5f7a60d91a293eaabd576ddf5dbd 4620de45b5729f56c331f497a9be3bb214ea7579 ea40ff646af7329e0ff24c2b55382db8925cd12f a14502015ec444b4c133b6b410c9a2fd376d66f9} a14502015ec444b4c133b6b410c9a2fd376d66f9 cdb12941c45c4012b11d94c7ded164906c1e4595 {22dda7fe340e0f4769e4426b2e37f16b553ec89e daf38c888ded7b2cc1a00af9abb1637f4786c5d9 5728be9af89b35f0e88c8f382e597255cf7bbcca 28c7dd69ca6ae3430efc10a15bff40c939e7145a f476e3d54c0787bed8a74697c57ce5a37c570f82 9602dcfaafae81f8590848c766f40bf0b1d1afcb 7bec7a8188a5000d5a718aa2ef640648ab9080f1 cf34ad21b9a6b4611b9c403ff3f5aece2c08747b 44c15bada4a46827d2773690a7e83cd4cc49d860 d67f591003a9b6d4078830e1387d9650af74e0cc b5d07373ffc37847470d3123edee195ef2692861 
f3c9b92eb166f1ad929f9a0d791c933278cd8678 42c839a50627b9a1d7c85972f9738284218d31ba 01a81e124543fa25892da470b8718498d7925bf5 83e238027b783774471e5e0e525ebf04c0040455 4f34eb7485b0ae99462d3aa3fc91c67f2489741d 9e8b960a94b18124530aa3e80048fc3181e535c9 8aa894f37cbabd119229ca19fa16d299b2d0ea9e fdc97c03e873be948958cb933fcd9c2f6986b54d 271f745b8514938a0d3341e60ffe23b91fd2dd08 a6b0e568511f883231b2194e46253f6e9a4fb1b0 3af6521768c96969ae47db6f677455295c1eefe6 d0cb64b880319daca22a0dd0a3e053e65ea69d4d f828448c337e63342018375fad6a020e60c67101 c865efc090daefcf2173eb8c6418f56455cd81db 703bc52e0df33b7e968673d30f0216c5a2bced75 883e3a1e08894fcfd3dab6dfc161ff60621e5069 4c63f9219c1f5d9307eb44be950bdfdf69f4e880 105a249b27393a0b40e0653a10428cb94f6466f3 be833481d39703abacac467e38b46757a736d3ad ebbb3acac168ce95fb398f22a8386d9a98be63e7 072dd14dd202bd86bbf17fa48864f2b3311d8f4c cdb12941c45c4012b11d94c7ded164906c1e4595} 7d49aa5cb1ec287465e7e765154329c0c6832164 0d963d34727b66d4a95861d2d383ec212e1156c2 {e7443f50a013b3be39cbd327a608c24a039d5978 1935a692cd80e0d06e305038e9b5fe7872805ab5 5e4cec0b6bff613d855a90dc76cff1bab637d4a7 0d963d34727b66d4a95861d2d383ec212e1156c2} ae5ca33388451429d6d099efa5d0336dca4b9877 d3addbc9f08267a6188fad7ae873827a020d7749 d3addbc9f08267a6188fad7ae873827a020d7749 0d963d34727b66d4a95861d2d383ec212e1156c2 66f531655b29b4a85e2d59c51665141cf06297e2 {4b4c800b0ec88f1e068718f159bc04c17db8394e eab192a206709d40c27a261dbe42c570d7890753 6734bf72a3210148a48a596c08b5f5a1443773cb fcc161e7dfc209b76a9d7e416878d9c0e1669525 9eb66245a1e35ed4679ce8f237610e1cb4ba8160 4279da1b31051cd4fb8749a0d286691bc5ee9c38 fec002279290ed057d57cf20c09bda0e13455adf 66f531655b29b4a85e2d59c51665141cf06297e2} a314c921711271803567ae53af0026bb8c891155 f9fd25d95617a803789cfedbd6506713810a88da {3936836df08f219ec3f249bf3cb33fd6a8b9c3c6 f9fd25d95617a803789cfedbd6506713810a88da} f9fd25d95617a803789cfedbd6506713810a88da c4b332f53bfd06985ede6d025ca1fd8a3e029098 {fa0c35ed34a64867ee49435448c90acb7150b49e 
3afacc2d7cf85e56ad80a548b3ac12200dffefe9 0f7f030564d38e3c57e7d56f2fb34d198659346e c4b332f53bfd06985ede6d025ca1fd8a3e029098} f9fd25d95617a803789cfedbd6506713810a88da 66f531655b29b4a85e2d59c51665141cf06297e2 66f531655b29b4a85e2d59c51665141cf06297e2 66f531655b29b4a85e2d59c51665141cf06297e2 c4b332f53bfd06985ede6d025ca1fd8a3e029098 c4b332f53bfd06985ede6d025ca1fd8a3e029098 c4b332f53bfd06985ede6d025ca1fd8a3e029098 4b68eb8fc8ee0b4a8bde55a7b7555d84880b1c31 {627b69b1e516613c0c4ff2c1d2e587e6dcdce46e caef3c81b394185976f1ae31df57c0d5c6dd5724 4ffe9280495a4f44a15f14bdc0d78791afea02e9 03abdb401e1979a6eaad968d2b50fc0f800885bb 4b68eb8fc8ee0b4a8bde55a7b7555d84880b1c31} c479e4fde9fd8744cae2751402c94ffe011ac9f4 4c5348aa7bbc8d0a4f19c7fe73712a65b10f4056 4c5348aa7bbc8d0a4f19c7fe73712a65b10f4056 088d2b8c5cf159e9042c53541e53649c9ab846e1 4c5348aa7bbc8d0a4f19c7fe73712a65b10f4056 4c5348aa7bbc8d0a4f19c7fe73712a65b10f4056 4c5348aa7bbc8d0a4f19c7fe73712a65b10f4056 5a3ba9f5cf710fb9e872915000a593cfee3ac466 {e88746c10fed37e5202c59aabaf5222612a47740 1318322ab86c5d98d2032a179b2d37128c4e9e95 d3be66c36d47f86c1720a681f71cd66ed73cdd7c 5a3ba9f5cf710fb9e872915000a593cfee3ac466} d3addbc9f08267a6188fad7ae873827a020d7749 a28b5aff6e3b88cc02ea075a3c4fbc950f504f88 {a5d8a4fb4b2d803b4c46540642f899a4050f32ec a28b5aff6e3b88cc02ea075a3c4fbc950f504f88} d3addbc9f08267a6188fad7ae873827a020d7749 f23c4c093a15c3bc03f3e2bde176f6b3c66a6291 f23c4c093a15c3bc03f3e2bde176f6b3c66a6291 f23c4c093a15c3bc03f3e2bde176f6b3c66a6291 a28b5aff6e3b88cc02ea075a3c4fbc950f504f88 {c650020b38779a970a6217c512b592d630d85f25 a28b5aff6e3b88cc02ea075a3c4fbc950f504f88} 176dd839f0c6ff62a6a08f97c6f9a0298d2adeb8 a28b5aff6e3b88cc02ea075a3c4fbc950f504f88 a28b5aff6e3b88cc02ea075a3c4fbc950f504f88 a28b5aff6e3b88cc02ea075a3c4fbc950f504f88 1d1a106dbdd85fba24a16016efa191f147fed855 {9578c607c5dd3b7be0ca43726dc5f0b3bddf05bf 1d1a106dbdd85fba24a16016efa191f147fed855} afd4558ec6893b776c9e7516f0deabfee1354bc5 4b68eb8fc8ee0b4a8bde55a7b7555d84880b1c31 
{8d1f3ad1629d4584c2ba047cee72304f71dffe13 a456504180be1693bcbe598b0cd0897abe33c082 4b68eb8fc8ee0b4a8bde55a7b7555d84880b1c31} 4b68eb8fc8ee0b4a8bde55a7b7555d84880b1c31 d328eac00f377d528b302a40ad7b4898a867812e {2d7ee19286ce1d204ecec8be348188cc17e68735 f373fab45079565c618f78f13bfe5fec2111724b 32be29446fc1e42ada5d26a214dcaeffc9f89e28 b9afeea46bb054d3d7f3907eaaf780a539e644e1 30024441a483f22d73ea0091b2570738e91a0e35 7e666a55bbd12588266f18ab77799674a034f1dc 1b4bb352cb1cc5ca912b69c2f27f13825d152e67 84074c87bf9af93fee8c92607474aa9822298e58 3c4c946b9cfa80f95b5ce0f47ea799dae886ad54 24c2cad08bebd0a5a9f7fe6b571946e250bff042 be5db8a830b8d3d691faacaf73848b236d5be443 8b7b2e589e079c4aca76c81b171964d175afaf1b 7b12d90da86a46ef6553cd34bc9043386371f204 d328eac00f377d528b302a40ad7b4898a867812e} c9486d34b549adf349a3f7367d7deb0f12252ecf 1d1a106dbdd85fba24a16016efa191f147fed855 1d1a106dbdd85fba24a16016efa191f147fed855 1d1a106dbdd85fba24a16016efa191f147fed855 1a8dff0a0f0901e89f5769de9e7fffa9c9435030 {421c2e02854612f71d7f854f45d772bfb9ee9b4d ee5cdc9fe2dd6b344e13724878e958ada2d19259 769b5e7074b9cf67d61bbd1dcebcad187ee3ea41 d6ab491cec8e3b8bb385f0cd1eb7a6f7e3045518 1a8dff0a0f0901e89f5769de9e7fffa9c9435030} 4cca9801e5ca24ffa4f0ba335c58fa67de052b00 1a8dff0a0f0901e89f5769de9e7fffa9c9435030 1a8dff0a0f0901e89f5769de9e7fffa9c9435030 1a8dff0a0f0901e89f5769de9e7fffa9c9435030 22875a50e6c5d2e54ceda2b20c096133b7ee5c92 {66d0f006b731a36a14ad354ef05c65f7e7c7cf16 923ec64a53039a374f0c9d1f935cdfe2bc66ff20 b4e39e9edd9bb378eaa88060ac5e7d66282b3dcf 7bd1be274ced58cdd14e088566110291d6fb6e43 8a4b6ac2a4364e1ac0f8484f99553275a2fa2895 371de595ad29c7a0f010ad655170da21c6c8bba1 e41aa4c61e1d012c6b1982f13c2acab487af12b2 32ecc079d44c9030327405fd2d99bd335dc24b3a 25699c7b43d24cb94c26960fa7ba2687f31fa75e 40854970977192de9b1040c0847661494bb052c0 611cd66f6df50865ee76e5a4dacc992fdffae879 6d6327b477c65eeb990698b96e3f81da69a13134 8388be58e43bd4cef3aadf8a2aa2f6151d9f582b cd0cc42fe16ce9bf2fc33c3631d73c456d88e250 
a305b3522ef89826a7d6f04d103fdf36721f444d 6f7ead614d1e34ce91f953ba01d2c7348e1560f6 92ca9477bcdbdb148c6eddd21ebc37ae844ce811 884003a338e1d9c15befbfcfc2b4ed50bca890d2 45e5b76f370b61e9efa621ce3540c2fab6310fe1 1b4aa5c4eda39bc263ca7e16a1f0c65491e0bcdb 4dec1fbb3ad094b1af9592032c66143c241a677a 7f46d85037819b67d4c7430f7451a2643a20e661 6f8a760198d43297dfc7440485f01452d8666c44 f2f88a7547a5ddab0f3c37fe3b77d0da5335989b a5de81dc79582df26b6d3cda133f44fd7ea3db2d cf09925352c1578ff3ec2800a17b647ac7ae33ad 05e7b21b3c20ac732b3c76942ceb5aa70bde35a1 69a0a7c0c8ee74e73b264c92237a195e577aca76 a772c4110d976d78ef761bcac0abeb74b3eec2e8 2b540077298cdfb1ce51ae582d1a8181f07e0b2b 06d13af77a6e0031414390ed89eeb90f904a4218 091eba6aef9f4604d0fb6d9ee1a559af347b1ce8 b99f18db51e67a2961c957f85127e9bdd7f5faed e12a96effd3478aa823ca67918464b699701797a 0e571ce11fee4a2ff381f2feea6dd5160c2372a5 56fc534f89e8d6418ad8bc04005e8e9305747ca6 7f572627761fde9e70aba05ad707245b460cdb76 d6896977cff5319eca819a9bd302f0738b86226a 3bede71a2a9a4d9d1731f109d259cc4a75e3ee7d 1d30229a948dde82fd2149eb403d5cd65ddf5e22 25481a27ecca66d142bff35f88ad5a2ea17390fa 66f01155fe068378f5c55e5502ddb39fdc1710a2 51fc44cdd24295bcfa1bee04fcf2f186ed9b531e 543bd639eaa773f36c7fb451c7b40f67316c7864 4f6c1b478742a34fc2939468bba9cbd2bd9c6133 32cda4d1d9adbb4d4af82523770eaf1061c9ba27 e88af175df2081360c70e3ac62a9b6816f00af78 9006993f00a01a29360deeb153c11a165fdfde68 49fde10a2a20e5b2ba33870f32a14a280def2598 4f895e0bee4d3bf19ab3733ee66747d23a74261d c1df3ca1a6d579a06023558243b8108e0bd5aa2a 32336797598214ad2c11216706a8a36222bef74c 303da389c728553db3dfcfb554e337468bff6a21 764361bf541ec718696627eb5d7ca633407391f2 3cb85582d3041e738f8874b078b4d5cba6002f57 219600d0d1e263834699b12057eaf7db8bcae7b9 e7b9b0cb6aadda42f23e32f9fcb0e1e80fcaec63 8c31964a0f3f0bd504071cab225e7bf5b786fe98 0f2e05a9440e849fb391dd464e4022942e11879e f32ff104bb7a50d1cb05e8b77378399fe825ed25 65ec178b4a4345de2100300fea94c9710966557d 954b1846d60307b1fd283988ce4c657b24ee52de 
608019a7160f88ff33bbaebfa0298de597f50e75 7f01825f6bfbe4600f7b46c9f88030ae0193a3db 39735f0579cf453b4a8a5fc0bd48b3a7b6035f35 8bbb5d336d180479f03c995f8b097182f085a13d f5b5592cfa2a4b14213b459eeac9221ff40e901d fca86ffd2a9ee8f0aae95f3282ef90015c7950ad 0fa159ad4978ed7301cd85582c706c7441818974 6d53bb385dd24e279a0860229b97560fc11b3faf 2d2a4128924b9eca26b71c010feaea8241cbaed2 4e11cc84f5495e208348b8c95c8abd348e45567e ed7f572e662eccae848fead12cb75f4dc2aa5552 e31143ad340098edff96ca8463f7124d1dd005b2 c86f87c557a86a8fb1372f8d19cfaf4e2ddd493d adae28b9844229390f7334a1fa57805a0629a0bd b742e281949d0408ffc9886cbf0b14896df1edcd 8f2d36ec7e50524cfc37b03476879c04936e499c b3957e262e025c1de5b7f55c04733eba77136d4c 656c215bc6d17b904bb4736b603ce5a2a20067db 260e09874a259f301cbad67741c45010a75da35d 02113fd8b456e8359886db5ba1c866ded1e13d59 02babde6941d5be3308255f1f28374201b609fd9 7f8044497c0350a9826cea735a490684d9b09581 15b2a3930919b78a462c99d45091d14f1ed2da5f 4597fb5bf7fbcc4c1ed653cae7b319985844e2fc 35f1a6968047d512c7d3af734e5e76c7f7eccde7 70097de175dce28ba1f082360df0b930a1356182 97fa739ab083a109d59d1e3d995839252ccb1f7b 0e618d0e88db4ac7a66a8b728f3023888a66646c d1a42682b8d081e1fe988c9f542272d84e1e6cf4 dacac142b5d9ea6e275ae70506d5690e19222eb5 b74516dcec299e0caeb72220a68b625288e19c99 d822eee91d7a98b66206e032901b9a3c252963a2 16151a654ce68439eed38a02883891a50f29b00b 0f55427174887473afd50f6423bad6fb770e3a02 23481b32301d721048334e26cd93d3c0fce6a781 b0f8bb46a28fcdfd9cb87a2fd274d7d5ee740368 8b0da2aee2a1bcc3c5c85a4a89bb8325378466b3 5a266c058d1431d6d0962f4b2d6a8b529feac4a0 1f17b5f6f6d57967ea607c6480552c84742d69a5 853ccc5b477b3e04b0af21741c7e5162dc1fd6d9 89c108b83e771e80c6ca1767de36243b7bd20cce ab197c41bdf94e4b6077df75e18a7b272d22dd5e e606424a5bf05fc1d0c6f3bde7327ba6837ef1ef c0fd63e0472b856b0cb43c4fd14aacf3b0f6ec1e 0ef29204dd946df833c828cf7584e3d27c4c2b0d 914d483c21a3971fb671af7d7ce7f450eccd183c 937a3fa92589ee629e26c552f434e6eda79ed027 7727e7a77a2cb1dc18395698d75ae2c5751abc25 
390b54e9737adea66f9b09835aa0e6390149ab83 e7ba722891489cec7c594dd89163440498d82ae2 ac3389359481d0c767f58a1a4272af18a8a95ef0 912adae740618fb7d57192e6f6e7e58c158c35fb 3f0c398254e0c6e1b71ff0ea2aa4094220360ef5 017a21dcf671bf06b566edace8f027b1e7382546 2c5fddb5d49510f7325094bdf646907a09001a4c 846f47da022c69cbba8c17a7cce7f9f792f970d5 00cb233b5910a5ddf3815bd9b711eb4ed10a0c26 c1de56fef972d00ae8768ba1ebaf15ac2bd3c3d3 5d78af30ed30d55bbc85f20c344d545560dbc656 9bbee62ae0dd07ba1d6f82d0308e993cd11ef923 ed3ef7bba9f8842555af4eb87d95a58ed4dcdae2 4d7793a8202bf33cc781e6f1b7d3df1db807f146 9d927baa3365e48e70fc67419cfb09e56cffb68e e6c70f62d68cda8dbd0666841374780d6c22b363 46d17ff85203bbd12966e522edd7ce1281df3426 35dcd8acecc9d707838f5ee1bc5ade6c9c466f2e 9b105899548cd0b0f673980965eff4827ae2dea8 583048d2b8dddd23a218257eff4b9f5e8a110912 d3cb9954f11bab74b03a335d076bbbaaac5a73d2 f3cfe8b1682a2d305a1e09f63b8fe42d39bf308e caf812a279eafcbaf80c0b9ae28098bd390e77e5 2922e45ef46d8844b892b7d2068301cf9140c320 55b222726957cb7b2644873c9e1e88fefc94231c c88425eeb4d606348dc0be81567db76f71a654cd 140c01270a49b4d656b8a67d6fe5a7f373065cad b645a1e6da811de5f4267d47cca5bbb63a31ba63 5eba520ef2f8002f9102f1eb5e68415fd0152b70 9c4a3217882149140610caeab2b5b99c20839e52 45bb052c61f6817ae78e9a5565a8810170ca47cb e9aee7683808629aad040334bedebbaf31c7a370 a87ccd102af4be9ca165312712069c3f3c3150c3 6ad60e2e6f381f4e1c495d0c5cde79b19dec4cba 8efd986db89621ba36539166970672b413410f14 df2e2845ce90d1d6411e2f0c105cd849096ba1e4 a07a875c9ba66900422fb6f58e64c9a0e4a59e4b 59f6241874f3c26d18c3cf542a71704c819832e7 155f3ea17f830deecc49e44f0e8d5ffff73bc095 bab5c7c14b055ada88d45d6b691bb15bf5d5bf6a 87c2b88576186ed131b2507c69eb046d7de2887a 0e9ce45659720d63363625744c27fc85e171d155 1fc12871655f3239003994c05b8397a5d180b282 7a900c0c0010a94958434b803fb2ced46cbed81e 8cf52784f0ebb77c4a32645a20d52ac1c87558e0 f16e577c2d897253484cbe1319dbf7d404242c4d 1cc936b8341b2b2944e654011c1e562b1831fce8 bd88812dae1d5edf493f8d04443333bfe73ed7c8 
8f48a222a411ddc9e2bc43558d9c9c0af1e5eca7 8b70f1a1d13aa93403e61cc5b1029117d2cdf802 768cb79d40300436870f04642cf293588b45ced4 25b16358cc8570f630247a67b8cc63288a754373 b844c5616d065119da962efbb5866e4c0cd1e4a5 629a7eda96a73f79ed24764dc52f36389a684322 ae1945dfe99267713f526225c88cbcc704b735cb 1ddeb88c578547f94acd6618251dc98aecbe4f51 d6d66d525b3e278bdbcaaefa896bf8e7daa43087 1baa90e2aed03209aba8b58f2e1a31d749b3d7a8 689b4998277f640b082b0d2bb3fb2b85459007b7 6fa578bf89509b480648f79cc574567fca53f659 ec8d7342603c8e469d7c9179f1127a2e09d943f7 9ae06ae7a10d8d55eb0edac9c22ecb6d2dce45b3 d2f227fa3f2ee6ffdfd7b8edbbd72f606c4e6848 5ea88a788a86047a0c92896d4adba10ec86fb12f b8b219c069a1dfa65051f12c6a7746ffd22ebd6c e5ead0c097a3d6ed5e81c374e6346f72576377c3 1c4918520d9bd5531fa98ffd2155e515b19f7fc8 129c4be837a34afa2ad7bee5bee7f455af7f3fc5 54fedebeb9dad8feec60737da1b7b71da6e0802f fa7ff3a8d44d872af047b7507f0504409414c70b 2865c04e8aaa0cf3c23e29802659bfac86cfd361 259a897d2976400b566425ba9515fac17592916f ce7650156b1c624de5e080d6c16798548f3fade9 ed21766bf50dabc2862bd366ad30bffd205f9c62 14799572e05a0b433e1f05ad3587be21eec05a8b b5a63d05542b7dafdb455f27ffc8811a549b9cff 0da089f33abe550b8c4b7e2dc7a298bd33b85eb6 4f6cac76391e458a3debaad103fc2e0d4660e4c7 aa79ca82b38913d364eb57d86531aefab7dee69f fc53b1adbfcc19d7415fc7c27cd101d3994e1946 28886982b57bb45be71a7352f3fc18d11db8e92c 288fdbf0972e71152e8ad3ea1474ea21d05d9b2e 22875a50e6c5d2e54ceda2b20c096133b7ee5c92} 71c27818947aefc303a2ba16d22255a15b5f8998 d328eac00f377d528b302a40ad7b4898a867812e {553d28fb5b82e1bae366acf28069ba45e54249b7 4f030c79b00d948c1a7ac42c7ef1dba24bd04320 d328eac00f377d528b302a40ad7b4898a867812e} d328eac00f377d528b302a40ad7b4898a867812e 776e7a5f5c55dcd359d5c648479a8ae515c09a14 {3b4acb9d57e89585f07c8d367ad896b880205962 8b3563d2c5501c69805887c6919b0c166740e967 e77e93353a2f31d03e2527f1a785719f3e0eb44c 6f8cced287e4c65e73cc1f221ab7c2a997586a99 8503ae3ef743dcc22785b78642d407aec42d4880 9c642b04e28c30b58e6a7ede2f711fb0fc99a46f 
3cdd2c5f2f6177ea50aa75d71133ed85aeaf12ae 54395019f9bcb3af527388d0a3561b8eaff715ef fa6a85d2a01a280e6c41c1f9251c8681af7525d4 1ab9d0eb6bc6fbda77f5e130f568d50f16ec8605 721ce254752ae9d6a2386645056d3c57609bcdcc 4f2698c445e045afe00839649ae4d8a1d1c204db c03352faf3423ddfd682009ffc7de778a4881120 559eade28001fde4d5ce116f9d87689a37469047 3dcf26aea3028a68defdb368498076bf6f0dfc0f 93bf7131f497e628d17bcfd04a2fd5d05815364f 664ac05961e03e5d383b3db497011e04c308f21f 354f5da6a1469aa235959a5e02dcd80738837105 dbd7dbc5b754e42a93639912c84c93eae83a8ad8 345ca748a394847304427fce4cbbc3a618bffdec 53930d974f43daffe471cc13adb64937543ff6f0 0d6e3cb36f98e3f8970e6f465a3925fa145cecd5 d77e55b1669fa80d412ecb34039a7f5809a9ffe0 407e077c8d476b517b0502483cf445beceaf5600 0ea48457ab1998717b835db539a84cc5c66bf73f 6faf01276a0d6073f1d4f1efc064075e1f111de2 2369d6a68734a290fd85378cb9d3fdbf2eb7c931 927537fb19b11412ecdeabc4c384e2574c90490a 7722d8d10756aa25f86852d943ccb59bd78a3e72 c59af556f3a670e829159b4a91f2724a367bfbba 9ffe4843ddeb7a7b96bcbed2efcb90b3ab32bfd0 6f1951ddda7df48bd518bc13b1a3aa10d2c63232 815c66da43ca4151bc92b92d9546357d0fd20746 641affd23374be9cfc15997ee6a8bd78cfc9a212 ba5de65ae071fcc01fd030c346e21795f132ec13 b68c4c149346e591991bd7fc9edc592ac51d09a4 9fd90ac80c1addb7395f6ce6a74b8354238e70c7 1e872542533f00ab4f5f9f9bfc628a3bd0adbf9a 6fea25d5fd4162dff145514607da095b36840899 29a9af534533dcbc0dd41df31b7b0520ea3fa964 a3d1f749232c90671b1b9eee36234f5d0cd805ed eb8bbc53e32d39386835781cb624ab67b801bcf8 7a00d421f08789f0e019709c80622b5bd5a975d3 eb6729a83839bd173214e5ae7410134277a13adc dcecc21fe1f17ef66ce417560d37f5773349ad97 76640becc1b508be54acbba0222d338a9545cdb0 f8e69b555c9c3130dde1446008ed9a3fd3bc554d e0fe227579a0c128b82ae9f089eb1d4951edbeb6 389215b9288abe5bb14e40bd7d359ffd763ca7c0 ebebd151dd64cadda6d492afd50296dce243aaae b5639a2c27151a678d4e9acf34c9f583ff31ced8 3b2a181d005232fa98387c3928bbec103a8db71f 5143731d7bb67fdb9bf6c5d096536a42d17476c9 37cc2c1e37f4a4e946fac3aa8aa4b3ab878d71a0 
21e12ef92fe211327c6b5bfff2dc7cf682b543e2 682a1fb8bf4c58d3f0334468eeb171778c97e2c8 69031d61bbd59dcdba0abacf7e7a6ce9bac5b0dd c86ec0b26672310975d9155d9ebbe332939a7e76 2a5e011e1e8c009ae38cdbecd25736461ef08a6f a9b4eadb704f4d83ff37a69afce75ed54bf91f50 ce1e3b6c772876358744430e90c847af063dd595 4dcf1ea57faa74ec6efd52baf7a6062ac27041a5 0bef1c2e1823c6493868668c28d55206b5af1d2a 8b53df2a77e9ab0d12ecb61ae62340bdbec6f957 1dc15b26b39e43f505c16336286d9d0961fe7e34 ce48c9e88138f3f963c99d4f9540fde4d2ee1cbf 1303fc05f0fcd013ef411db031984608b51093dd 5e240296b996a9221e8e30414682a782e80c232d a74bee8f57760bf700d583793f8364fdf863d8f0 c6ddffe3d0ab8e249a862057f5575ca40e09afdb f9163ca1811b4b8d3df28e39e4c4d0875a805a50 d61ca81c0f4cc309eb83fe73bc39ed06a1a3fca4 66f9a554fd83e4f2a37914015319ef677cb8bef4 4e3067ef34125ea7de74a11e0481a1c9f4aaa81d e450b4a795f185e8de27ea5fe686a5c62318704a 3ef68dbf193c671d7df9a7d00c41dc672dbea2f1 a3564a599aaf07a660d21bb3cab4cbe7de70d3e0 6af30039706ee6eba2cb29f03368806df7c71d3a db3a7578e15cdc6bbb309618a6b0be76db598e32 79b9172dd72c45a95ac6bd9240ab74eea7f6d1e7 1400c39bcaf2f639c88f95081a0e3d76aa135e6a 5661b5c27ec81da84fff379f48a28be90e3d7c4f 20496ab47aa9a4ca7914b25c873ceae77c393403 d533b0308da44cad25c6b63bb15c7bea015566ca 0adb48d8a1458549539bc19aad5944002d18506a 9c72289222a7cd5495b725ed5702038affcb2548 51c174202f269b7f772cbb9576430feda6e7e907 11c6ec8a393211285676756eb397ddfda9573363 5e9d551586b1076171477f267f899e73af422ea5 2740e3841a8140c0ae0a8ad25fbe6ecd1be93f2f d0b8f542ab0fc7c0be35ed7b00cea33f5dad78f3 40cf853bacfab38a98f2a021a08916bb5a4adcdf e7b112db0e1e8e3a955cdb8bf332f1d66f66a4c7 4654f40c986dcb61a05aa9c997126d892d09baf9 5485fe6758ab332a159764273de0db7da2fe94ab 9beea3686c80f26b6361f15c1fee0e865b3b3c29 383c3a5e71e2b1a4f34e514d1ac4abfc3e43bce4 31fccafbd6c2522f273e1c0fcddbf7a37bf2e311 b75086349ced07d9edb70881b7455ef5eee5bec4 cdbac095b682ef55df7e23fa46a79b3996475913 07a3dc888dd9f58c51c42c26c552f1ab37865540 78a0934574217a3e5692af6bba1b7a4067c6c82b 
61d764a97dd9bf954102926cc1d6902f6ce86bdd 7220c869c3af3db019aca57bbd96f78c60f5e492 27ed627cb2a1b075468dfc1d04264fa660f7ea0d 798ce268c3debd553fb8e53cd7ff4a5490abe4ef 565073824849dd211a6ca7f510321cc49f7142a8 f1b4ea7410664d64ebba8a41666f28a8e2c4e836 c5aebcb23bd910beb2d2bc1a37c478154ec37356 be5130920f6ea7491207c25ca4d93290afc62d0f acb55f0160186e9e53316c31f51deb417d64000a 2b1b6f0ed6528c23829b00f1d27ced4a55495445 001d75574790725364c645905ab498a350d255f3 30a7a8425cad2ce102043a413c06e8849ee7dcd2 f1ba6046bca4c28df55c9240fa475a88032d74b0 69875904da8983797ef7f7010af7854083873541 565aa7652852acf7604f920fe6f9607e2c67db88 aadca87a143e1c64b4e320bfac72bfc398c4fc8e c870ff8a7960c154feca8e1e35797a5e57039c1e 08b2301a84c90de538e3f0a20e3f146ef227c48b 59c2b2bb09316898d54d5a5a126ca5203c29371b 48c99723424bad53aceeb3f5da4def53ff9fc4ee 5e382507c3d66f03af59e0ff18ad49e19190ea9a 45c56e184f928157ba318937ca7ead2e442d858e 6f5555acdaf2aa06cbea0135060e141f66522e87 d1f3b9c4b344ca8d724f29fd6d6e3e28062260f2 4bfb5b6f778631f1081ce3dfe7a92fc449ad3552 cdcdc34f61648973c9c425adb97ab7eda47fc418 cf8a926a20bb614b2bc5431cb94dec4a5b48d999 45836a720712a98039e261c75bf88eba28ad224c 4e6ce4b924ddd584a84bf4f11938cdf35ca6b352 869a4ea42e2a11b469768c6a8944e91c53eef9f1 598107a7d02d2259898c3623478d356d57207080 1615662683353aae9ce29fc3c01b953813e17a39 89329f800dec92ca5e6f3de1108d9e65bd1df4f4 8cd25eb774438e369d37f5f3c09e2195240f93f8 bda13513064f0dd775b7160594801302ba6bb725 aeaafd679e109c9c1003119889a2514c25cf2386 22c2d8c4e313300a7ac09adf441f19533fc6469a 224aa5e37593a86cd10d960fc18086c27f6cab40 f3b8d98fd0d4f5c02637157706ebff362a1f37d5 8c5a13e2b4f901dffa6bdbf9cba1b1f366e02a61 eda7a843fb197f5b06eb4d598a5a75b62f982de8 776e7a5f5c55dcd359d5c648479a8ae515c09a14} 5a3ba9f5cf710fb9e872915000a593cfee3ac466 56a0456a49a78921d0b8897cda90d4e801b845a4 56a0456a49a78921d0b8897cda90d4e801b845a4 5a3ba9f5cf710fb9e872915000a593cfee3ac466 cdb12941c45c4012b11d94c7ded164906c1e4595 cdb12941c45c4012b11d94c7ded164906c1e4595 
cdb12941c45c4012b11d94c7ded164906c1e4595 3e054d4f8b78af0734fee6007ae809d100f41466 3e054d4f8b78af0734fee6007ae809d100f41466 56a0456a49a78921d0b8897cda90d4e801b845a4 1fd7f3397faa351df387648005ac4739a5b540a7 {12558714bc4ecd652e42de66afcf71a5f5a3ed81 1fd7f3397faa351df387648005ac4739a5b540a7} 56a0456a49a78921d0b8897cda90d4e801b845a4 3e054d4f8b78af0734fee6007ae809d100f41466 3e054d4f8b78af0734fee6007ae809d100f41466 3e054d4f8b78af0734fee6007ae809d100f41466 1fd7f3397faa351df387648005ac4739a5b540a7 {2faa2015c126f0645afa05c3e7f63018415f9edc 1fd7f3397faa351df387648005ac4739a5b540a7} 1fd7f3397faa351df387648005ac4739a5b540a7 55578c2d1d3bf02872e8eb1a5078e0b1667acb55 {a3b0f0fb864cdba28772edbc23c1e48633c4e44d 76aa400b1ba3820e3c6970f18e5e9548cd4523d5 e5c5fb16f62bb6bd50cd6ef982e8bb987a797324 22ad2dcf4a12970be8cbf2489630f1f1e56ec4d5 fcb6071d224814f37a8e941c59afc340d2bd3f7e 66fc295799348249b1acebfaacd9d0b59ceb97fa 04fdcca56ef5af95119b20a1c81b4608edf8f8de cf485612d6482998e5ebc3922b7d0e70d07f4f23 a72cf164936636f7d8cbf462eaa26339d56d3ed5 ca8b80433e9929ecb5edca238d5a5cbe2a348d99 330a8e1fc38c20f43ef997a4de79940808ef3c04 7f3161a61ad086a7996cc0c78f43e21ede8521ba 719232e16765bd6a7c332addc77397913a8658a9 dbfcc276c357443aab32ce9aa24766a739043608 126f2083ec9fcac26e95902182074ee2a042a08b 22f9c941620db3db422faacd353879e2b59fa37f 1d5bc869596986d1d95a6711afb85d400fe1786e a11781eae8ff86094f721948cb064377057d3876 c63118c25b8cf7bd9ff7909b085310a8c644cb00 bc8ebd49dc9d84a18419705f935217a75be923ab ea38f37435eb26e8d103bc0e2fde8f3f221278b0 b7c338734bd62436f512e2c4f28eff8e2f098e4d 085852fe081bf149f6a8a319194bb4c44eb70432 97a7e0f53c3fd4fd532fe9af918b4262452fe36a f769b6b96339527763cf2de837e3ff2f422e3f1a 1634af5ed1464e2206513c5c7542c37f86a8ef66 5221dcfc00ea06fe73b5249a97102b645a62e7ee fbfb11664a264cb327d6caf84c82f0ff14a39915 13227db7cb7df4f2f395ee91fbb187c74464bdea 395d515a7be0d913b37ba49101e29b2340df2f88 04cd457359f6c106be1b41a90c8af3ac98432c29 55578c2d1d3bf02872e8eb1a5078e0b1667acb55} 
989220ed8f456efa1448b96ed14be3d45a4890d7 84ae3d824b9e6842c7ad8f17142d52fd197dc213 84ae3d824b9e6842c7ad8f17142d52fd197dc213 c6281b3370e7eaaa824f959be1be942866770a37 84ae3d824b9e6842c7ad8f17142d52fd197dc213 84ae3d824b9e6842c7ad8f17142d52fd197dc213 84ae3d824b9e6842c7ad8f17142d52fd197dc213 55578c2d1d3bf02872e8eb1a5078e0b1667acb55 {c054c6cd5fce508d825316715a9d4a34ee36549a 969dfea69ce33ed302ee25371ac86a27333a9c28 3f6fef932f716e5b2b8d20bf7a14768d532185a8 51cc3e90abb36c1bb73f48c31d09093abcf6df3f 55578c2d1d3bf02872e8eb1a5078e0b1667acb55} 96692a87c5e28fddee956046822e65e7955811e3 bbdb80535f105681e19df4a20272671d849dd98e {517fa63044d54005a853f60eb6a16da4c4dcbb06 fed8a0c434604884c8a1752e6d6673330f3468bf bbdb80535f105681e19df4a20272671d849dd98e} ======================================================================= ==.git/COMMIT_EDITMSG ======================================================================= Sigh... One more addition to CHANGES.txt for RC0 ======================================================================= ==.git/ORIG_HEAD ======================================================================= e2eefd60eb66b907223f2316e56fe67d238d48d0 ======================================================================= ==.git/hooks/pre-push.sample ======================================================================= #!/bin/sh # An example hook script to verify what is about to be pushed. Called by "git # push" after it has checked the remote status, but before anything has been # pushed. If this script exits with a non-zero status nothing will be pushed. # # This hook is called with the following parameters: # # $1 -- Name of the remote to which the push is being done # $2 -- URL to which the push is being done # # If pushing without using a named remote those arguments will be equal. 
# # Information about the commits which are being pushed is supplied as lines to # the standard input in the form: # # <local ref> <local sha1> <remote ref> <remote sha1> # # This sample shows how to prevent push of commits where the log message starts # with "WIP" (work in progress). remote="$1" url="$2" z40=0000000000000000000000000000000000000000 IFS=' ' while read local_ref local_sha remote_ref remote_sha do if [ "$local_sha" = $z40 ] then # Handle delete : else if [ "$remote_sha" = $z40 ] then # New branch, examine all commits range="$local_sha" else # Update to existing branch, examine new commits range="$remote_sha..$local_sha" fi # Check for WIP commit commit=`git rev-list -n 1 --grep '^WIP' "$range"` if [ -n "$commit" ] then echo "Found WIP commit in $local_ref, not pushing" exit 1 fi ======================================================================= ==.git/hooks/pre-commit.sample ======================================================================= #!/bin/sh # # An example hook script to verify what is about to be committed. # Called by "git commit" with no arguments. The hook should # exit with non-zero status after issuing an appropriate message if # it wants to stop the commit. # # To enable this hook, rename this file to "pre-commit". if git rev-parse --verify HEAD >/dev/null 2>&1 then against=HEAD else # Initial commit: diff against an empty tree object against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 fi # If you want to allow non-ASCII filenames set this variable to true. allownonascii=$(git config --bool hooks.allownonascii) # Redirect output to stderr. exec 1>&2 # Cross platform projects tend to avoid non-ASCII filenames; prevent # them from being added to the repository. We exploit the fact that the # printable range starts at the space character and ends with tilde. 
if [ "$allownonascii" != "true" ] && # Note that the use of brackets around a tr range is ok here, (it's # even required, for portability to Solaris 10's /usr/bin/tr), since # the square bracket bytes happen to fall in the designated range. test $(git diff --cached --name-only --diff-filter=A -z $against | LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 then cat <<\EOF Error: Attempt to add a non-ASCII file name. This can cause problems if you want to work with people on other platforms. To be portable it is advisable to rename the file. If you know what you are doing you can disable this check using: git config hooks.allownonascii true EOF exit 1 fi # If there are whitespace errors, print the offending file names and fail. exec git diff-index --check --cached $against -- ======================================================================= ==.git/hooks/update.sample ======================================================================= #!/bin/sh # # An example hook script to blocks unannotated tags from entering. # Called by "git receive-pack" with arguments: refname sha1-old sha1-new # # To enable this hook, rename this file to "update". # # Config # ------ # hooks.allowunannotated # This boolean sets whether unannotated tags will be allowed into the # repository. By default they won't be. # hooks.allowdeletetag # This boolean sets whether deleting tags will be allowed in the # repository. By default they won't be. # hooks.allowmodifytag # This boolean sets whether a tag may be modified after creation. By default # it won't be. # hooks.allowdeletebranch # This boolean sets whether deleting branches will be allowed in the # repository. By default they won't be. # hooks.denycreatebranch # This boolean sets whether remotely creating branches will be denied # in the repository. By default this is allowed. # # --- Command line refname="$1" oldrev="$2" newrev="$3" # --- Safety check if [ -z "$GIT_DIR" ]; then echo "Don't run this script from the command line." 
>&2 echo " (if you want, you could supply GIT_DIR then run" >&2 echo " $0 <ref> <oldrev> <newrev>)" >&2 exit 1 fi if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then echo "usage: $0 <ref> <oldrev> <newrev>" >&2 exit 1 fi # --- Config allowunannotated=$(git config --bool hooks.allowunannotated) allowdeletebranch=$(git config --bool hooks.allowdeletebranch) denycreatebranch=$(git config --bool hooks.denycreatebranch) allowdeletetag=$(git config --bool hooks.allowdeletetag) allowmodifytag=$(git config --bool hooks.allowmodifytag) ======================================================================= ==.git/hooks/applypatch-msg.sample ======================================================================= #!/bin/sh # # An example hook script to check the commit log message taken by # applypatch from an e-mail message. # # The hook should exit with non-zero status after issuing an # appropriate message if it wants to stop the commit. The hook is # allowed to edit the commit message file. # # To enable this hook, rename this file to "applypatch-msg". . git-sh-setup test -x "$GIT_DIR/hooks/commit-msg" && exec "$GIT_DIR/hooks/commit-msg" ${1+"$@"} : ======================================================================= ==.git/hooks/pre-rebase.sample ======================================================================= #!/bin/sh # # Copyright (c) 2006, 2008 Junio C Hamano # # The "pre-rebase" hook is run just before "git rebase" starts doing # its job, and can prevent the command from running by exiting with # non-zero status. # # The hook is called with the following parameters: # # $1 -- the upstream the series was forked from. # $2 -- the branch being rebased (or empty when rebasing the current branch). # # This sample shows how to prevent topic branches that are already # merged to 'next' branch from getting rebased, because allowing it # would result in rebasing already published history. 
publish=next basebranch="$1" if test "$#" = 2 then topic="refs/heads/$2" else topic=`git symbolic-ref HEAD` || exit 0 ;# we do not interrupt rebasing detached HEAD fi case "$topic" in refs/heads/??/*) ;; *) exit 0 ;# we do not interrupt others. ;; esac # Now we are dealing with a topic branch being rebased # on top of master. Is it OK to rebase it? # Does the topic really exist? git show-ref -q "$topic" || { echo >&2 "No such branch $topic" exit 1 } # Is topic fully merged to master? not_in_master=`git rev-list --pretty=oneline ^master "$topic"` if test -z "$not_in_master" then echo >&2 "$topic is fully merged to master; better remove it." exit 1 ;# we could allow it, but there is no point. ======================================================================= ==.git/hooks/commit-msg.sample ======================================================================= #!/bin/sh # # An example hook script to check the commit log message. # Called by "git commit" with one argument, the name of the file # that has the commit message. The hook should exit with non-zero # status after issuing an appropriate message if it wants to stop the # commit. The hook is allowed to edit the commit message file. # # To enable this hook, rename this file to "commit-msg". # Uncomment the below to add a Signed-off-by line to the message. # Doing this in a hook is a bad idea in general, but the prepare-commit-msg # hook is more suited to it. # # SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') # grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" # This example catches duplicate Signed-off-by lines. test "" = "$(grep '^Signed-off-by: ' "$1" | sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { echo >&2 Duplicate Signed-off-by lines. 
exit 1 } ======================================================================= ==.git/hooks/post-update.sample ======================================================================= #!/bin/sh # # An example hook script to prepare a packed repository for use over # dumb transports. # # To enable this hook, rename this file to "post-update". exec git update-server-info ======================================================================= ==.git/hooks/prepare-commit-msg.sample ======================================================================= #!/bin/sh # # An example hook script to prepare the commit log message. # Called by "git commit" with the name of the file that has the # commit message, followed by the description of the commit # message's source. The hook's purpose is to edit the commit # message file. If the hook fails with a non-zero status, # the commit is aborted. # # To enable this hook, rename this file to "prepare-commit-msg". # This hook includes three examples. The first comments out the # "Conflicts:" part of a merge commit. # # The second includes the output of "git diff --name-status -r" # into the message, just before the "git status" output. It is # commented because it doesn't cope with --amend or with squashed # commits. # # The third example adds a Signed-off-by line to the message, that can # still be edited. This is rarely a good idea. case "$2,$3" in merge,) /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;; # ,|template,) # /usr/bin/perl -i.bak -pe ' # print "\n" . 
`git diff --cached --name-status -r` # if /^#/ && $first++ == 0' "$1" ;; *) ;; esac # SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') # grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" ======================================================================= ==.git/hooks/pre-applypatch.sample ======================================================================= #!/bin/sh # # An example hook script to verify what is about to be committed # by applypatch from an e-mail message. # # The hook should exit with non-zero status after issuing an # appropriate message if it wants to stop the commit. # # To enable this hook, rename this file to "pre-applypatch". . git-sh-setup test -x "$GIT_DIR/hooks/pre-commit" && exec "$GIT_DIR/hooks/pre-commit" ${1+"$@"} : ======================================================================= ==.git/refs/tags/0.98.3RC0 ======================================================================= 611a400a44b725eb38049116af3aac89a2177236 ======================================================================= ==.git/refs/tags/0.94.20RC0 ======================================================================= b575fb0297590250cd7396f2d7be0d61f9bb8927 ======================================================================= ==.git/refs/heads/0.94 ======================================================================= 09c60d770f2869ca315910ba0f9a5ee9797b1edc ======================================================================= ==.git/refs/remotes/origin/master ======================================================================= de1f96096a0f34ab8910d5b52da60cb5cf5eecdf ======================================================================= ==.git/refs/remotes/origin/0.96 ======================================================================= 7d56e0523fd9eacf074b602a56bf48f98acea0c8 ======================================================================= ==.git/refs/remotes/origin/0.94 
======================================================================= 09c60d770f2869ca315910ba0f9a5ee9797b1edc ======================================================================= ==.git/refs/remotes/origin/hbase-10070 ======================================================================= 0abda799aa395ac0352de9a4720a6662e16c27d8 ======================================================================= ==.git/refs/remotes/origin/HEAD ======================================================================= ref: refs/remotes/origin/master ======================================================================= ==.git/refs/remotes/origin/0.98 ======================================================================= 503709fdf67d335fcb96f9a60a4b971de01479e1 ======================================================================= ==.git/logs/refs/heads/0.94 ======================================================================= 0000000000000000000000000000000000000000 ab9234e71d82bff1a13a921e91a9cfe42abda143 Lars Hofhansl <lhofhansl@yahoo.com> 1400816920 -0700 clone: from https://git-wip-us.apache.org/repos/asf/hbase.git ab9234e71d82bff1a13a921e91a9cfe42abda143 9264a40cf894918a7a9c3b3d5a907f8feb7285a0 Lars Hofhansl <lhofhansl@yahoo.com> 1400817158 -0700 commit: pom.xml, CHANGES.txt for 0.94.20RC0 9264a40cf894918a7a9c3b3d5a907f8feb7285a0 47fc8dd3fd6ec1cc8dd836d8abb0a8b906485922 Lars Hofhansl <larsh@apache.org> 1400828375 -0700 commit: updated CHANGES.txt for 0.94.20RC0 47fc8dd3fd6ec1cc8dd836d8abb0a8b906485922 20ce4e5f117411f3eb2a444ace12754e6072eaf2 Lars Hofhansl <larsh@apache.org> 1400828431 -0700 pull: Merge made by the 'recursive' strategy. 20ce4e5f117411f3eb2a444ace12754e6072eaf2 db401458b5fdf1d38f1771825922eb7bec17826b Lars Hofhansl <larsh@apache.org> 1400902857 -0700 pull: Merge made by the 'recursive' strategy. 
db401458b5fdf1d38f1771825922eb7bec17826b 11ee6177a984fffe592525fe8596c50c5eb4085f Lars Hofhansl <larsh@apache.org> 1400902869 -0700 rebase finished: refs/heads/0.94 onto e2eefd60eb66b907223f2316e56fe67d238d48d0 11ee6177a984fffe592525fe8596c50c5eb4085f e2eefd60eb66b907223f2316e56fe67d238d48d0 Lars Hofhansl <larsh@apache.org> 1400902924 -0700 reset: moving to HEAD^ e2eefd60eb66b907223f2316e56fe67d238d48d0 35408b5d2a1ec8f50471d6899e366801b1cd2992 Lars Hofhansl <larsh@apache.org> 1400907041 -0700 commit: HBASE-11247 [0.94] update maven-site-plugin to 3.3. 35408b5d2a1ec8f50471d6899e366801b1cd2992 09c60d770f2869ca315910ba0f9a5ee9797b1edc Lars Hofhansl <larsh@apache.org> 1400907343 -0700 commit: Sigh... One more addition to CHANGES.txt for RC0 ======================================================================= ==.git/logs/refs/remotes/origin/master ======================================================================= 41691e469ae4f59aa04bfec99b1e8a19699f1d04 c61cb7fb55124547a36a6ef56afaec43676039f8 Lars <lars@newbunny.(none)> 1400828431 -0700 pull: fast-forward c61cb7fb55124547a36a6ef56afaec43676039f8 de1f96096a0f34ab8910d5b52da60cb5cf5eecdf Lars <lars@newbunny.(none)> 1400902857 -0700 pull: fast-forward ======================================================================= ==.git/logs/refs/remotes/origin/0.96 ======================================================================= 6ea0b7f8c2af72c6fd379f04e2d7145928e237ad 7d56e0523fd9eacf074b602a56bf48f98acea0c8 Lars <lars@newbunny.(none)> 1400828431 -0700 pull: fast-forward ======================================================================= ==.git/logs/refs/remotes/origin/0.94 ======================================================================= ab9234e71d82bff1a13a921e91a9cfe42abda143 9264a40cf894918a7a9c3b3d5a907f8feb7285a0 Lars Hofhansl <lhofhansl@yahoo.com> 1400817202 -0700 update by push 9264a40cf894918a7a9c3b3d5a907f8feb7285a0 b2a2e060982634cfae4bec2665f2b63e1f680ff1 Lars <lars@newbunny.(none)> 
1400828431 -0700 pull: fast-forward b2a2e060982634cfae4bec2665f2b63e1f680ff1 20ce4e5f117411f3eb2a444ace12754e6072eaf2 Lars Hofhansl <larsh@apache.org> 1400828469 -0700 update by push 20ce4e5f117411f3eb2a444ace12754e6072eaf2 e2eefd60eb66b907223f2316e56fe67d238d48d0 Lars <lars@newbunny.(none)> 1400902857 -0700 pull: forced-update e2eefd60eb66b907223f2316e56fe67d238d48d0 35408b5d2a1ec8f50471d6899e366801b1cd2992 Lars Hofhansl <larsh@apache.org> 1400907065 -0700 update by push 35408b5d2a1ec8f50471d6899e366801b1cd2992 09c60d770f2869ca315910ba0f9a5ee9797b1edc Lars Hofhansl <larsh@apache.org> 1400907354 -0700 update by push ======================================================================= ==.git/logs/refs/remotes/origin/hbase-10070 ======================================================================= 97b7df274c27ae1075d0782ad31cf284362eaf48 0abda799aa395ac0352de9a4720a6662e16c27d8 Lars <lars@newbunny.(none)> 1400902857 -0700 pull: fast-forward ======================================================================= ==.git/logs/refs/remotes/origin/HEAD ======================================================================= 0000000000000000000000000000000000000000 41691e469ae4f59aa04bfec99b1e8a19699f1d04 Lars Hofhansl <lhofhansl@yahoo.com> 1400816920 -0700 clone: from https://git-wip-us.apache.org/repos/asf/hbase.git ======================================================================= ==.git/logs/refs/remotes/origin/0.98 ======================================================================= d89342e4166c3f969d79e6be57d7efd0c37cd5b2 796134e9f5ebc97451d9e59bcc75fcb3d01b4a4d Lars <lars@newbunny.(none)> 1400828431 -0700 pull: fast-forward 796134e9f5ebc97451d9e59bcc75fcb3d01b4a4d 503709fdf67d335fcb96f9a60a4b971de01479e1 Lars <lars@newbunny.(none)> 1400902857 -0700 pull: fast-forward ======================================================================= ==.git/logs/HEAD ======================================================================= 
0000000000000000000000000000000000000000 ab9234e71d82bff1a13a921e91a9cfe42abda143 Lars Hofhansl <lhofhansl@yahoo.com> 1400816920 -0700 clone: from https://git-wip-us.apache.org/repos/asf/hbase.git ab9234e71d82bff1a13a921e91a9cfe42abda143 9264a40cf894918a7a9c3b3d5a907f8feb7285a0 Lars Hofhansl <lhofhansl@yahoo.com> 1400817158 -0700 commit: pom.xml, CHANGES.txt for 0.94.20RC0 9264a40cf894918a7a9c3b3d5a907f8feb7285a0 47fc8dd3fd6ec1cc8dd836d8abb0a8b906485922 Lars Hofhansl <larsh@apache.org> 1400828375 -0700 commit: updated CHANGES.txt for 0.94.20RC0 47fc8dd3fd6ec1cc8dd836d8abb0a8b906485922 20ce4e5f117411f3eb2a444ace12754e6072eaf2 Lars Hofhansl <larsh@apache.org> 1400828431 -0700 pull: Merge made by the 'recursive' strategy. 20ce4e5f117411f3eb2a444ace12754e6072eaf2 db401458b5fdf1d38f1771825922eb7bec17826b Lars Hofhansl <larsh@apache.org> 1400902857 -0700 pull: Merge made by the 'recursive' strategy. db401458b5fdf1d38f1771825922eb7bec17826b e2eefd60eb66b907223f2316e56fe67d238d48d0 Lars Hofhansl <larsh@apache.org> 1400902869 -0700 pull --rebase: checkout e2eefd60eb66b907223f2316e56fe67d238d48d0 e2eefd60eb66b907223f2316e56fe67d238d48d0 11ee6177a984fffe592525fe8596c50c5eb4085f Lars Hofhansl <larsh@apache.org> 1400902869 -0700 pull --rebase: updated CHANGES.txt for 0.94.20RC0 11ee6177a984fffe592525fe8596c50c5eb4085f 11ee6177a984fffe592525fe8596c50c5eb4085f Lars Hofhansl <larsh@apache.org> 1400902869 -0700 rebase finished: returning to refs/heads/0.94 11ee6177a984fffe592525fe8596c50c5eb4085f e2eefd60eb66b907223f2316e56fe67d238d48d0 Lars Hofhansl <larsh@apache.org> 1400902924 -0700 reset: moving to HEAD^ e2eefd60eb66b907223f2316e56fe67d238d48d0 35408b5d2a1ec8f50471d6899e366801b1cd2992 Lars Hofhansl <larsh@apache.org> 1400907041 -0700 commit: HBASE-11247 [0.94] update maven-site-plugin to 3.3. 35408b5d2a1ec8f50471d6899e366801b1cd2992 09c60d770f2869ca315910ba0f9a5ee9797b1edc Lars Hofhansl <larsh@apache.org> 1400907343 -0700 commit: Sigh... 
One more addition to CHANGES.txt for RC0 ======================================================================= ==.git/HEAD ======================================================================= ref: refs/heads/0.94 ======================================================================= ==.git/packed-refs ======================================================================= # pack-refs with: peeled fully-peeled 225a9ff80814ba900567160bd66c2790ac395ea6 refs/remotes/origin/0.1 76bd7a435717e314cfcfbe6a5125cc3bc9f6600c refs/remotes/origin/0.18 cba75045e43e0e7b5ea45814430833c6b05e2eae refs/remotes/origin/0.19 32a0de7c563609280cda0cff1726800385a58498 refs/remotes/origin/0.19_on_hadoop_0.18 b64b592f237e873bcb96b87c34c8537ee9af5ed4 refs/remotes/origin/0.2 3196c23cdc5c5a6b7089c85ffa500a9f754c0f49 refs/remotes/origin/0.20 61cd9b450ca8fef698e6032ba38f3dcc6d54d645 refs/remotes/origin/0.20_on_hadoop-0.18.3 fc685e7ab32d437dd1f0f3dcab829493888d159f refs/remotes/origin/0.20_on_hadoop-0.21 2f8f236014c202250db178ad4a64f25dd79f371b refs/remotes/origin/0.89 9d18d00259702b0a622127479cc2d1e56ad0a6c1 refs/remotes/origin/0.89-fb 31dcca3d1eea10cca90460911134bbb20e41dc2e refs/remotes/origin/0.89-fb-accidentally-wiped-commit-log dffc4672fa8bb4fa98acabf8ad29553310e77b2b refs/remotes/origin/0.89.0621 1bc79b40c23749ffd696624661699b19e677bb95 refs/remotes/origin/0.89.20100621 526f9e14fc92b3a4c76b1d58ebef1c91f050ae75 refs/remotes/origin/0.89.20100726 d43f89f72b6e90075527314d04fd444b233043fa refs/remotes/origin/0.89.20100830 1c2775af4490b89b24a8cad7ce41798da95aa387 refs/remotes/origin/0.89.20100924 d39548939457d84b033eddbcf9e641c00a6d2e35 refs/remotes/origin/0.90 a6208ec1bcb9ec5002b5fab0262e8107d7b7389a refs/remotes/origin/0.90_coprocessors 1cf0c82d71dd13c41ddb2a3cb5d82058798405fc refs/remotes/origin/0.90_master_rewrite 4037c8de59598b3241bcf2060aea6f253176d782 refs/remotes/origin/0.92 2cf68bdc1fbaf20be72819dd5b9ef204f30457c4 refs/remotes/origin/0.92.0rc4 
ab9234e71d82bff1a13a921e91a9cfe42abda143 refs/remotes/origin/0.94 0005cd2c2420f9d6e1d27dec3841b6d76d06c07a refs/remotes/origin/0.94-test 4793668c1d70b7a784e12a782b79d15099117ea0 refs/remotes/origin/0.95 6ea0b7f8c2af72c6fd379f04e2d7145928e237ad refs/remotes/origin/0.96 d89342e4166c3f969d79e6be57d7efd0c37cd5b2 refs/remotes/origin/0.98 b2ee50a3cf81c0774cf8d9ee3a3760a0e961c3d1 refs/remotes/origin/former_0.20 97b7df274c27ae1075d0782ad31cf284362eaf48 refs/remotes/origin/hbase-10070 9b2f9d5316776f4c510ceb304703be3d801722e5 refs/remotes/origin/hbase-7290 ca65f47aee238e6071dab5c9d4fd2323550ea82e refs/remotes/origin/hbase-7290v2 be92c350d9c117c94dc3bf41ab8cf1208e1796c8 refs/remotes/origin/instant_schema_alter 41691e469ae4f59aa04bfec99b1e8a19699f1d04 refs/remotes/origin/master e9295fdf9b9bae1ca36e0bd495fa92cb43f04717 refs/remotes/origin/testing_remove 6826da42644036d4e32db9347e64e5fea87f1fc6 refs/remotes/origin/trunk_on_hadoop-0.19.1-dev_with_hadoop-4379 9f4aeb5eea6043225a095bf2124079641bc064d9 refs/tags/0.1.0 ^ff87e088bd8904b1f1dc15f8c7e4efd8e568b125 cf22059d452e8b03ca799b237340dbb62c5d0afb refs/tags/0.1.1 ^3a05d25478c93a857e78d9bc5f4f5b1178f58f3d c9aacb889f58a1157b77a19dabc4a4630aa45b5d refs/tags/0.1.2 ^a5aaa845aa3f565a3131b38cbe3a85716a43dd36 86333924a2c0a28b2a0045e3bda580db54033689 refs/tags/0.1.3 ^077523b2f8b78d9ed3186cfc00a26c9e4ef05561 518db6e12bfc9ff81d54e1c35039c29a9c87177a refs/tags/0.18.1 ^952719f5d172c32af04e98358da9c08e108ac269 2ddc7a8f734426a30d2fdd802feadcb4e5015bdc refs/tags/0.19.0 ^f479355fa925100840bbdd1efe6709df26a8d280 e575f18691e2b2c05d63ee0220f87a2454808d6f refs/tags/0.19.1 ^28a63125aeaa2a500a7cce2a7e9da7c7f135210d a66f3c54e2ffb165585e39d74af3d207a6221d7b refs/tags/0.19.1RC1 ======================================================================= ==.git/config ======================================================================= [core] repositoryformatversion = 0 filemode = true bare = false logallrefupdates = true [remote "origin"] url = 
https://git-wip-us.apache.org/repos/asf/hbase.git fetch = +refs/heads/*:refs/remotes/origin/* [branch "0.94"] remote = origin merge = refs/heads/0.94 ======================================================================= ==.gitignore ======================================================================= /.arc_jira_lib /.classpath /.externalToolBuilders /.project /.settings /build /.idea/ /logs /target *.iml *.orig *~ ======================================================================= ==CHANGES.txt ======================================================================= HBase Change Log Release 0.94.20 - 05/23/2014 Sub-task [HBASE-10936] - Add zeroByte encoding test Bug [HBASE-10958] - [dataloss] Bulk loading with seqids can prevent some log entries from being replayed [HBASE-11110] - Ability to load FilterList class is dependent on context classloader [HBASE-11143] - Improve replication metrics [HBASE-11188] - "Inconsistent configuration" for SchemaMetrics is always shown [HBASE-11212] - Fix increment index in KeyValueSortReducer [HBASE-11225] - Backport fix for HBASE-10417 'index is not incremented in PutSortReducer#reduce()' [HBASE-11247] - [0.94] update maven-site-plugin to 3.3 Improvement [HBASE-11008] - Align bulk load, flush, and compact to require Action.CREATE [HBASE-11119] - Update ExportSnapShot to optionally not use a tmp file on external file system [HBASE-11128] - Add -target option to ExportSnapshot to export with a different name [HBASE-11134] - Add a -list-snapshots option to SnapshotInfo Release 0.94.19 - 04/21/2014 Bug [HBASE-10118] - Major compact keeps deletes with future timestamps [HBASE-10312] - Flooding the cluster with administrative actions leads to collapse [HBASE-10533] - commands.rb is giving wrong error messages on exceptions [HBASE-10766] - SnapshotCleaner allows to delete referenced files [HBASE-10805] - Speed up KeyValueHeap.next() a bit [HBASE-10807] - -ROOT- still stale in table.jsp if it moved [HBASE-10845] - Memstore 
snapshot size isn't updated in DefaultMemStore#rollback() [HBASE-10847] - 0.94: drop non-secure builds, make security the default [HBASE-10848] - Filter SingleColumnValueFilter combined with NullComparator does not work [HBASE-10966] - RowCounter misinterprets column names that have colons in their qualifier [HBASE-10991] - Port HBASE-10639 'Unload script displays wrong counts (off by one) when unloading regions' to 0.94 [HBASE-11003] - ExportSnapshot is using the wrong fs when staging dir is not in fs.defaultFS [HBASE-11030] - HBaseTestingUtility.getMiniHBaseCluster should be able to return null Task [HBASE-10921] - Port HBASE-10323 'Auto detect data block encoding in HFileOutputFormat' to 0.94 / 0.96 Test [HBASE-10782] - Hadoop2 MR tests fail occasionally because of mapreduce.jobhistory.address is no set in job conf [HBASE-10969] - TestDistributedLogSplitting fails frequently in 0.94. [HBASE-10982] - TestZKProcedure.testMultiCohortWithMemberTimeoutDuringPrepare fails frequently in 0.94 ======================================================================= ==conf/regionservers ======================================================================= localhost ======================================================================= ==conf/log4j.properties ======================================================================= # Define some default values that can be overridden by system properties hbase.root.logger=INFO,console hbase.security.logger=INFO,console hbase.log.dir=. hbase.log.file=hbase.log # Define the root logger to the system property "hbase.root.logger". 
log4j.rootLogger=${hbase.root.logger} # Logging Threshold log4j.threshold=ALL # # Daily Rolling File Appender # log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file} # Rollver at midnight log4j.appender.DRFA.DatePattern=.yyyy-MM-dd # 30-day backup #log4j.appender.DRFA.MaxBackupIndex=30 log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout # Pattern format: Date LogLevel LoggerName LogMessage log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n # Debugging Pattern format #log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n # # Security audit appender # hbase.security.log.file=SecurityAuth.audit log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender log4j.appender.DRFAS.File=${hbase.log.dir}/${hbase.security.log.file} log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n log4j.category.SecurityLogger=${hbase.security.logger} log4j.additivity.SecurityLogger=false #log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE # # Null Appender # log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender # # console ======================================================================= ==conf/hadoop-metrics.properties ======================================================================= # See http://wiki.apache.org/hadoop/GangliaMetrics # Make sure you know whether you are using ganglia 3.0 or 3.1. # If 3.1, you will have to patch your hadoop instance with HADOOP-4675 # And, yes, this file is named hadoop-metrics.properties rather than # hbase-metrics.properties because we're leveraging the hadoop metrics # package and hadoop-metrics.properties is an hardcoded-name, at least # for the moment. 
# # See also http://hadoop.apache.org/hbase/docs/current/metrics.html # GMETADHOST_IP is the hostname (or) IP address of the server on which the ganglia # meta daemon (gmetad) service is running # Configuration of the "hbase" context for NullContextWithUpdateThread # NullContextWithUpdateThread is a null context which has a thread calling # periodically when monitoring is started. This keeps the data sampled # correctly. hbase.class=org.apache.hadoop.metrics.spi.NullContextWithUpdateThread hbase.period=10 # Configuration of the "hbase" context for file # hbase.class=org.apache.hadoop.hbase.metrics.file.TimeStampingFileContext # hbase.fileName=/tmp/metrics_hbase.log # HBase-specific configuration to reset long-running stats (e.g. compactions) # If this variable is left out, then the default is no expiration. hbase.extendedperiod = 3600 # Configuration of the "hbase" context for ganglia # Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter) # hbase.class=org.apache.hadoop.metrics.ganglia.GangliaContext # hbase.class=org.apache.hadoop.metrics.ganglia.GangliaContext31 # hbase.period=10 # hbase.servers=GMETADHOST_IP:8649 # Configuration of the "jvm" context for null jvm.class=org.apache.hadoop.metrics.spi.NullContextWithUpdateThread jvm.period=10 # Configuration of the "jvm" context for file # jvm.class=org.apache.hadoop.hbase.metrics.file.TimeStampingFileContext # jvm.fileName=/tmp/metrics_jvm.log # Configuration of the "jvm" context for ganglia # Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter) # jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext # jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext31 # jvm.period=10 # jvm.servers=GMETADHOST_IP:8649 # Configuration of the "rpc" context for null