/**
 * Copyright 2011 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.client;

import static org.junit.Assert.fail;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import com.google.common.io.ByteStreams;

// TODO: cover more test cases
@Category(SmallTests.class)
public class TestGet {

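  // Base64-encoded Writable form of a Get whose filter is test.MockFilter,
  // a class that is not on the test classpath.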
  private static final String WRITABLE_GET =
    "AgD//////////wAAAAEBD3Rlc3QuTW9ja0ZpbHRlcgEAAAAAAAAAAH//////////AQAAAAAAAAAA";

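  // Base64-encoded jar containing test/MockFilter.class, used by testDynamicFilter
  // to make the filter class loadable when the Get above is deserialized again.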
  private static final String MOCK_FILTER_JAR =
    "UEsDBBQACAgIACmBi0IAAAAAAAAAAAAAAAAJAAQATUVUQS1JTkYv/soAAAMAUEsHCAAAAAACAAAA" +
    "AAAAAFBLAwQUAAgICAApgYtCAAAAAAAAAAAAAAAAFAAAAE1FVEEtSU5GL01BTklGRVNULk1G803M" +
    "y0xLLS7RDUstKs7Mz7NSMNQz4OVyLkpNLElN0XWqBAmY6xnEG1gqaPgXJSbnpCo45xcV5BcllgCV" +
    "a/Jy8XIBAFBLBwgxyqRbQwAAAEQAAABQSwMECgAACAAAbICLQgAAAAAAAAAAAAAAAAUAAAB0ZXN0" +
    "L1BLAwQUAAgICAAcgItCAAAAAAAAAAAAAAAAFQAAAHRlc3QvTW9ja0ZpbHRlci5jbGFzc41Qy07C" +
    "QBS9A4VKBZGHoO7cgQvHmLjCuPBBQlJloWE/tCMdLZ1mOlV/y5WJCz/AjzLeDqCRYOIs7uuce87N" +
    "QK9kso9lmsq4o8kSTK+kt99FWoubLAIdKWaUBYzL+A0YL6UMQ3GLOH0znDojHqGEwLFExEJfUpg" +
    "6jqLAAAAA==";
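  /**
   * Verifies that Get attributes survive a write/readFields round trip.
   */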
  @Test
  public void testAttributesSerialization() throws IOException {
    Get get = new Get();
    get.setAttribute("attribute1", Bytes.toBytes("value1"));
    get.setAttribute("attribute2", Bytes.toBytes("value2"));
    get.setAttribute("attribute3", Bytes.toBytes("value3"));

    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    DataOutput out = new DataOutputStream(byteArrayOutputStream);
    get.write(out);

    Get get2 = new Get();
    Assert.assertTrue(get2.getAttributesMap().isEmpty());

    get2.readFields(new DataInputStream(
      new ByteArrayInputStream(byteArrayOutputStream.toByteArray())));

    Assert.assertNull(get2.getAttribute("absent"));
    Assert.assertTrue(Arrays.equals(Bytes.toBytes("value1"), get2.getAttribute("attribute1")));
    Assert.assertTrue(Arrays.equals(Bytes.toBytes("value2"), get2.getAttribute("attribute2")));
    Assert.assertTrue(Arrays.equals(Bytes.toBytes("value3"), get2.getAttribute("attribute3")));
    Assert.assertEquals(3, get2.getAttributesMap().size());
  }

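  /**
   * Exercises setAttribute, getAttribute and getAttributesMap: adding, overriding
   * and removing attributes.
   */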
  @Test
  public void testGetAttributes() {
    Get get = new Get();
    Assert.assertTrue(get.getAttributesMap().isEmpty());
    Assert.assertNull(get.getAttribute("absent"));

    get.setAttribute("absent", null);
    Assert.assertTrue(get.getAttributesMap().isEmpty());
    Assert.assertNull(get.getAttribute("absent"));

    // adding attribute
    get.setAttribute("attribute1", Bytes.toBytes("value1"));
    Assert.assertTrue(Arrays.equals(Bytes.toBytes("value1"), get.getAttribute("attribute1")));
    Assert.assertEquals(1, get.getAttributesMap().size());
    Assert.assertTrue(Arrays.equals(Bytes.toBytes("value1"),
      get.getAttributesMap().get("attribute1")));

    // overriding attribute value
    get.setAttribute("attribute1", Bytes.toBytes("value12"));
    Assert.assertTrue(Arrays.equals(Bytes.toBytes("value12"), get.getAttribute("attribute1")));
    Assert.assertEquals(1, get.getAttributesMap().size());
    Assert.assertTrue(Arrays.equals(Bytes.toBytes("value12"),
      get.getAttributesMap().get("attribute1")));

    // adding another attribute
    get.setAttribute("attribute2", Bytes.toBytes("value2"));
    Assert.assertTrue(Arrays.equals(Bytes.toBytes("value2"), get.getAttribute("attribute2")));
    Assert.assertEquals(2, get.getAttributesMap().size());
    Assert.assertTrue(Arrays.equals(Bytes.toBytes("value2"),
      get.getAttributesMap().get("attribute2")));

    // removing attribute
    get.setAttribute("attribute2", null);
    Assert.assertNull(get.getAttribute("attribute2"));
    Assert.assertEquals(1, get.getAttributesMap().size());
    Assert.assertNull(get.getAttributesMap().get("attribute2"));

    // removing a non-existent attribute
    get.setAttribute("attribute2", null);
    Assert.assertNull(get.getAttribute("attribute2"));
    Assert.assertEquals(1, get.getAttributesMap().size());
    Assert.assertNull(get.getAttributesMap().get("attribute2"));

    // removing the remaining attribute
    get.setAttribute("attribute1", null);
    Assert.assertNull(get.getAttribute("attribute1"));
    Assert.assertTrue(get.getAttributesMap().isEmpty());
    Assert.assertNull(get.getAttributesMap().get("attribute1"));
  }

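  /**
   * Deserializing a Get that references an unknown filter class should fail until a jar
   * providing that class is placed in the local dynamic jar directory.
   */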
  @Test
  public void testDynamicFilter() throws Exception {
    DataInput dis = ByteStreams.newDataInput(Base64.decode(WRITABLE_GET));
    Get get = new Get();
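    // test.MockFilter is not available yet, so deserializing the filter should fail.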
    try {
      get.readFields(dis);
      fail("Should not be able to load the filter class");
    } catch (RuntimeException re) {
      String msg = re.getMessage();
      Assert.assertTrue(msg != null
        && msg.contains("Can't find class test.MockFilter"));
    }

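    // Write the mock filter jar into the dynamic jar directory under hbase.local.dir,
    // from which extra filter classes can be picked up at deserialization time.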
    Configuration conf = HBaseConfiguration.create();
    String localPath = conf.get("hbase.local.dir") + File.separator
      + "dynamic" + File.separator + "jars" + File.separator;
    File jarFile = new File(localPath, "MockFilter.jar");
    jarFile.deleteOnExit();

    FileOutputStream fos = new FileOutputStream(jarFile);
    fos.write(Base64.decode(MOCK_FILTER_JAR));
    fos.close();

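    // With the jar in place, deserializing the same Get should now resolve test.MockFilter.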
    dis = ByteStreams.newDataInput(Base64.decode(WRITABLE_GET));
    get.readFields(dis);
    Assert.assertEquals("test.MockFilter",
      get.getFilter().getClass().getName());
  }

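  // Watches for resources (such as threads) left behind by the tests in this class.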
  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
    new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
}