METAMODEL-1205: Fixed CassandraUnit, Guava, Hadoop for JDK9+
[metamodel.git] / hbase / src / test / java / org / apache / metamodel / hbase / HBaseDataContextTest.java
1 /**
2 * Licensed to the Apache Software Foundation (ASF) under one
3 * or more contributor license agreements. See the NOTICE file
4 * distributed with this work for additional information
5 * regarding copyright ownership. The ASF licenses this file
6 * to you under the Apache License, Version 2.0 (the
7 * "License"); you may not use this file except in compliance
8 * with the License. You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing,
13 * software distributed under the License is distributed on an
14 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 * KIND, either express or implied. See the License for the
16 * specific language governing permissions and limitations
17 * under the License.
18 */
19 package org.apache.metamodel.hbase;
20
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.metamodel.data.DataSet;
import org.apache.metamodel.schema.ColumnType;
import org.apache.metamodel.schema.Table;
import org.apache.metamodel.util.SimpleTableDef;
import org.junit.Before;
import org.junit.Test;
40
41 public class HBaseDataContextTest extends HBaseTestCase {
42
43 @Override
44 @Before
45 public void setUp() throws Exception {
46 super.setUp();
47 createTableNatively();
48 }
49
50 @Test
51 public void testCreateInsertQueryAndDrop() throws Exception {
52 // test the schema exploration
53 final Table table = getDataContext().getDefaultSchema().getTableByName(TABLE_NAME);
54 assertNotNull(table);
55
56 assertEquals("[" + HBaseDataContext.FIELD_ID + ", " + CF_BAR + ", " + CF_FOO + "]",
57 Arrays.toString(table.getColumnNames().toArray()));
58 assertEquals(HBaseColumn.DEFAULT_COLUMN_TYPE_FOR_COLUMN_FAMILIES, table.getColumn(1).getType());
59
60 // insert two records
61 insertRecordsNatively();
62
63 // query using regular configuration
64 try (final DataSet dataSet1 = getDataContext().query().from(TABLE_NAME).selectAll().execute()) {
65 assertTrue(dataSet1.next());
66 assertEquals("Row[values=[" + RK_1 + ", {" + Q_HEY + "=" + V_YO + "," + Q_HI + "=" + V_THERE + "}, {"
67 + Q_HELLO + "=" + V_WORLD + "}]]", dataSet1.getRow().toString());
68 assertTrue(dataSet1.next());
69 assertEquals("Row[values=[" + RK_2 + ", {" + Q_BAH + "=" + new String(V_123_BYTE_ARRAY) + "," + Q_HI + "="
70 + V_YOU + "}, {}]]", dataSet1.getRow().toString());
71 assertFalse(dataSet1.next());
72 }
73
74 // query using custom table definitions
75 final String columnName1 = CF_FOO;
76 final String columnName2 = CF_BAR + ":" + Q_HI;
77 final String columnName3 = CF_BAR + ":" + Q_HEY;
78 final String[] columnNames = new String[] { columnName1, columnName2, columnName3 };
79 final ColumnType[] columnTypes = new ColumnType[] { ColumnType.MAP, ColumnType.VARCHAR, ColumnType.VARCHAR };
80 final SimpleTableDef[] tableDefinitions =
81 new SimpleTableDef[] { new SimpleTableDef(TABLE_NAME, columnNames, columnTypes) };
82 setDataContext(new HBaseDataContext(new HBaseConfiguration("SCH", getZookeeperHostname(), getZookeeperPort(),
83 tableDefinitions, ColumnType.VARCHAR)));
84
85 try (final DataSet dataSet2 =
86 getDataContext().query().from(TABLE_NAME).select(columnName1, columnName2, columnName3).execute()) {
87 assertTrue(dataSet2.next());
88 assertEquals("Row[values=[{" + Q_HELLO + "=" + V_WORLD + "}, " + V_THERE + ", " + V_YO + "]]",
89 dataSet2.getRow().toString());
90 assertTrue(dataSet2.next());
91 assertEquals("Row[values=[{}, " + V_YOU + ", null]]", dataSet2.getRow().toString());
92 assertFalse(dataSet2.next());
93 }
94
95 // query count
96 try (final DataSet dataSet3 = getDataContext().query().from(TABLE_NAME).selectCount().execute()) {
97 assertTrue(dataSet3.next());
98 assertEquals("Row[values=[" + NUMBER_OF_ROWS + "]]", dataSet3.getRow().toString());
99 assertFalse(dataSet3.next());
100 }
101
102 // query only id
103 try (final DataSet dataSet4 =
104 getDataContext().query().from(TABLE_NAME).select(HBaseDataContext.FIELD_ID).execute()) {
105 assertTrue(dataSet4.next());
106 assertEquals("Row[values=[" + RK_1 + "]]", dataSet4.getRow().toString());
107 assertTrue(dataSet4.next());
108 assertEquals("Row[values=[" + RK_2 + "]]", dataSet4.getRow().toString());
109 assertFalse(dataSet4.next());
110 }
111
112 // primary key lookup query - using GET
113 try (final DataSet dataSet5 = getDataContext().query().from(TABLE_NAME).select(HBaseDataContext.FIELD_ID)
114 .where(HBaseDataContext.FIELD_ID).eq(RK_1).execute()) {
115 assertTrue(dataSet5.next());
116 assertEquals("Row[values=[" + RK_1 + "]]", dataSet5.getRow().toString());
117 assertFalse(dataSet5.next());
118 }
119 }
120
121 private void insertRecordsNatively() throws IOException, InterruptedException {
122 try (final org.apache.hadoop.hbase.client.Table hTable = getDataContext().getHTable(TABLE_NAME)) {
123 final Put put1 = new Put(RK_1.getBytes());
124 put1.addColumn(CF_FOO.getBytes(), Q_HELLO.getBytes(), V_WORLD.getBytes());
125 put1.addColumn(CF_BAR.getBytes(), Q_HI.getBytes(), V_THERE.getBytes());
126 put1.addColumn(CF_BAR.getBytes(), Q_HEY.getBytes(), V_YO.getBytes());
127
128 final Put put2 = new Put(RK_2.getBytes());
129 put2.addColumn(CF_BAR.getBytes(), Q_BAH.getBytes(), V_123_BYTE_ARRAY);
130 put2.addColumn(CF_BAR.getBytes(), Q_HI.getBytes(), V_YOU.getBytes());
131
132 final Object[] result = new Object[NUMBER_OF_ROWS];
133 hTable.batch(Arrays.asList(put1, put2), result);
134 }
135 }
136
137 private void createTableNatively() throws IOException {
138 try (Admin admin = getDataContext().getAdmin()) {
139 final TableName tableName = TableName.valueOf(TABLE_NAME);
140
141 // Check if the table exists
142 if (admin.isTableAvailable(tableName)) {
143 // table already exists
144 System.out.println("Unittest table already exists: " + TABLE_NAME);
145 } else {
146 // Create table
147 System.out.println("Creating table");
148 final TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(tableName);
149 tableDescriptor.setColumnFamily(ColumnFamilyDescriptorBuilder.of(CF_FOO.getBytes()));
150 tableDescriptor.setColumnFamily(ColumnFamilyDescriptorBuilder.of(CF_BAR.getBytes()));
151 admin.createTable(tableDescriptor.build());
152 System.out.println("Created table");
153 }
154 }
155 }
156 }