/*
 * Copyright 2010 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.rest;

import java.io.IOException;
import java.util.Iterator;
import java.util.NoSuchElementException;

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.HTablePool;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.filter.Filter;

/**
 * Generates the KeyValues of a single row, as described by a RowSpec and an
 * optional Filter, and exposes them through an iterator-style interface for
 * the REST gateway.
 */
public class RowResultGenerator extends ResultGenerator {
  // iterator over the KeyValues of the fetched row
  private Iterator<KeyValue> valuesI;
  // single-element pushback buffer filled by putBack()
  private KeyValue cache;

  public RowResultGenerator(final String tableName, final RowSpec rowspec,
      final Filter filter) throws IllegalArgumentException, IOException {
    HTablePool pool = RESTServlet.getInstance().getTablePool();
    HTableInterface table = pool.getTable(tableName);
    try {
      Get get = new Get(rowspec.getRow());
      if (rowspec.hasColumns()) {
        // fetch only the requested columns; a specifier with an empty
        // qualifier selects the whole column family
        for (byte[] col: rowspec.getColumns()) {
          byte[][] split = KeyValue.parseColumn(col);
          if (split.length == 2 && split[1].length != 0) {
            get.addColumn(split[0], split[1]);
          } else {
            get.addFamily(split[0]);
          }
        }
      } else {
        // rowspec does not explicitly specify columns, return them all
        for (HColumnDescriptor family:
            table.getTableDescriptor().getFamilies()) {
          get.addFamily(family.getName());
        }
      }
      get.setTimeRange(rowspec.getStartTime(), rowspec.getEndTime());
      get.setMaxVersions(rowspec.getMaxVersions());
      if (filter != null) {
        get.setFilter(filter);
      }
      Result result = table.get(get);
      if (result != null && !result.isEmpty()) {
        valuesI = result.list().iterator();
      }
    } finally {
      // the Result is fully materialized, so the table can be returned
      // to the pool immediately
      pool.putTable(table);
    }
  }

  public void close() {
    // nothing to release; the row was fully read in the constructor
  }

  public boolean hasNext() {
    if (cache != null) {
      return true;
    }
    if (valuesI == null) {
      return false;
    }
    return valuesI.hasNext();
  }

  public KeyValue next() {
    // serve a pushed-back value first, if there is one
    if (cache != null) {
      KeyValue kv = cache;
      cache = null;
      return kv;
    }
    if (valuesI == null) {
      return null;
    }
    try {
      return valuesI.next();
    } catch (NoSuchElementException e) {
      return null;
    }
  }

  public void putBack(KeyValue kv) {
    this.cache = kv;
  }

  public void remove() {
    throw new UnsupportedOperationException("remove not supported");
  }
}
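
A minimal usage sketch (not part of the original file), assuming the 0.90-era
REST classes shown above. The table name, row key, and the RowSpec path-string
construction are illustrative assumptions; it only shows how a caller might
drain the generator for a single row.

// Hypothetical caller; "mytable" and "myrow" are assumptions, and the
// RowSpec path-string constructor is assumed. IOException thrown by the
// RowResultGenerator constructor propagates to the caller.
RowSpec rowspec = new RowSpec("/mytable/myrow");
ResultGenerator generator = new RowResultGenerator("mytable", rowspec, null);
try {
  while (generator.hasNext()) {
    KeyValue kv = generator.next();
    // process the cell, e.g. kv.getValue()
  }
} finally {
  generator.close();
}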