PHOENIX-374: Enable access to dynamic columns in * or cf.* selection
author: Chinmay Kulkarni <chinmayskulkarni@gmail.com>
Tue, 22 Jan 2019 06:00:06 +0000 (22:00 -0800)
committer: Chinmay Kulkarni <chinmayskulkarni@gmail.com>
Tue, 5 Feb 2019 21:13:49 +0000 (13:13 -0800)
33 files changed:
phoenix-core/src/it/java/org/apache/phoenix/end2end/DynamicColumnWildcardIT.java [new file with mode: 0644]
phoenix-core/src/main/java/org/apache/phoenix/compile/ProjectionCompiler.java
phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
phoenix-core/src/main/java/org/apache/phoenix/compile/RowProjector.java
phoenix-core/src/main/java/org/apache/phoenix/compile/TupleProjectionCompiler.java
phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
phoenix-core/src/main/java/org/apache/phoenix/coprocessor/HashJoinRegionScanner.java
phoenix-core/src/main/java/org/apache/phoenix/coprocessor/ScanRegionObserver.java
phoenix-core/src/main/java/org/apache/phoenix/coprocessor/generated/DynamicColumnMetaDataProtos.java [new file with mode: 0644]
phoenix-core/src/main/java/org/apache/phoenix/execute/BaseQueryPlan.java
phoenix-core/src/main/java/org/apache/phoenix/execute/CorrelatePlan.java
phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
phoenix-core/src/main/java/org/apache/phoenix/execute/SortMergeJoinPlan.java
phoenix-core/src/main/java/org/apache/phoenix/execute/TupleProjector.java
phoenix-core/src/main/java/org/apache/phoenix/expression/ProjectedColumnExpression.java
phoenix-core/src/main/java/org/apache/phoenix/hbase/index/util/KeyValueBuilder.java
phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
phoenix-core/src/main/java/org/apache/phoenix/iterate/RegionScannerFactory.java
phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixResultSet.java
phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java
phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
phoenix-core/src/main/java/org/apache/phoenix/query/QueryServices.java
phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java
phoenix-core/src/main/java/org/apache/phoenix/schema/PRow.java
phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
phoenix-core/src/main/java/org/apache/phoenix/schema/tuple/BaseTuple.java
phoenix-core/src/main/java/org/apache/phoenix/schema/tuple/DelegateTuple.java
phoenix-core/src/main/java/org/apache/phoenix/schema/tuple/Tuple.java
phoenix-core/src/main/java/org/apache/phoenix/util/EncodedColumnsUtil.java
phoenix-core/src/main/java/org/apache/phoenix/util/MetaDataUtil.java
phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
phoenix-core/src/test/java/org/apache/phoenix/util/MetaDataUtilTest.java
phoenix-protocol/src/main/DynamicColumnMetaData.proto [new file with mode: 0644]

diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/DynamicColumnWildcardIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/DynamicColumnWildcardIT.java
new file mode 100644 (file)
index 0000000..39a703b
--- /dev/null
@@ -0,0 +1,521 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.phoenix.end2end;
+
+import static java.sql.Types.BIGINT;
+import static java.sql.Types.INTEGER;
+import static java.sql.Types.VARCHAR;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_ENCODED_BYTES;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.IMMUTABLE_STORAGE_SCHEME;
+import static org.apache.phoenix.schema.PTable.ImmutableStorageScheme.ONE_CELL_PER_COLUMN;
+import static org.junit.Assert.assertEquals;
+
+import com.google.common.collect.Maps;
+import org.apache.phoenix.query.BaseTest;
+import org.apache.phoenix.schema.PTable.ImmutableStorageScheme;
+import org.apache.phoenix.util.ReadOnlyProps;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Map;
+
+import static org.apache.phoenix.query.QueryServices.WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB;
+
+/**
+ * Tests to check whether we correctly expose dynamic columns for wildcard queries when
+ * {@link org.apache.phoenix.query.QueryServices#WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB} config is
+ * turned on
+ */
+@RunWith(Parameterized.class)
+public class DynamicColumnWildcardIT extends BaseTest {
+    private final boolean mutableTable;
+    private final ImmutableStorageScheme storageScheme;
+
+    // name is used by failsafe as file name in reports
+    @Parameterized.Parameters(name="DynamicColumnWildcardIT_mutable={0}, storageScheme={1}")
+    public static Collection<Object[]> data() {
+        // TODO: Once PHOENIX-5107 is fixed, add a case for SINGLE_CELL_ARRAY_WITH_OFFSETS
+        return Arrays.asList(new Object[][] {
+                {true, null}, {false, ONE_CELL_PER_COLUMN}});
+    }
+
+    public DynamicColumnWildcardIT(boolean mutableTable, ImmutableStorageScheme storageScheme) {
+        this.mutableTable = mutableTable;
+        this.storageScheme = storageScheme;
+    }
+
+    @BeforeClass
+    public static void doSetup() throws Exception {
+        Map<String, String> clientProps = Maps.newHashMapWithExpectedSize(1);
+        clientProps.put(WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB, "true");
+        setUpTestDriver(ReadOnlyProps.EMPTY_PROPS, new ReadOnlyProps(clientProps));
+    }
+
+    // Create either a mutable table or an immutable table with the specified storage scheme
+    private String generateTableCreateDDL(String tableName, String schema) {
+        StringBuilder sb = new StringBuilder("CREATE ");
+        if (!this.mutableTable) {
+            sb.append("IMMUTABLE ");
+        }
+        sb.append("TABLE ").append(tableName).append(schema);
+        if (!this.mutableTable && this.storageScheme != null) {
+            sb.append(" ").append(IMMUTABLE_STORAGE_SCHEME).append("=").append(this.storageScheme);
+            sb.append(", ").append(COLUMN_ENCODED_BYTES).append("=1");
+        }
+        return sb.toString();
+    }
+
+    @Test
+    // Test the case where the table DDL only contains 1 column which is the primary key
+    public void testOnlySinglePkWithDynamicColumns() throws SQLException {
+        Connection conn = DriverManager.getConnection(getUrl());
+        conn.setAutoCommit(true);
+        String tableName = generateUniqueName();
+        conn.createStatement().execute(generateTableCreateDDL(tableName,
+                " (A INTEGER PRIMARY KEY)"));
+        conn.createStatement().execute("UPSERT INTO " + tableName +
+                " (A) VALUES(10)");
+        conn.createStatement().execute("UPSERT INTO " + tableName +
+                " (A, DYN1 INTEGER) VALUES(90, 3)");
+        conn.createStatement().execute("UPSERT INTO " + tableName +
+                " (A, DYN1 VARCHAR) VALUES(100, 'test')");
+
+        ResultSet rs = conn.createStatement().executeQuery("SELECT * FROM " + tableName);
+        int rsCounter = 0;
+        while(rs.next()) {
+            ResultSetMetaData rmd = rs.getMetaData();
+            int count = rmd.getColumnCount();
+            assertEquals(rsCounter == 0 ? 1 : 2, count);
+            for (int i = 1; i <= count; i++) {
+                if (rsCounter == 0) {
+                    assertEquals("A", rmd.getColumnName(i));
+                    assertEquals(INTEGER, rmd.getColumnType(i));
+                    assertEquals(10, rs.getObject(i));
+                } else if (rsCounter == 1) {
+                    if (i == 1) {
+                        assertEquals("A", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(90, rs.getObject(i));
+                    } else if (i == 2) {
+                        assertEquals("DYN1", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(3, rs.getObject(i));
+                    }
+                } else if (rsCounter == 2) {
+                    if (i == 1) {
+                        assertEquals("A", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(100, rs.getObject(i));
+                    } else if (i ==2) {
+                        assertEquals("DYN1", rmd.getColumnName(i));
+                        assertEquals(VARCHAR, rmd.getColumnType(i));
+                        assertEquals("test", rs.getObject(i));
+                    }
+                }
+            }
+            rsCounter++;
+        }
+    }
+
+    @Test
+    // Test the case where the table DDL contains 1 primary key column and other columns as well
+    public void testSinglePkAndOtherColsWithDynamicColumns() throws SQLException {
+        Connection conn = DriverManager.getConnection(getUrl());
+        conn.setAutoCommit(true);
+        String tableName = generateUniqueName();
+
+        conn.createStatement().execute(generateTableCreateDDL(tableName,
+                " (A INTEGER PRIMARY KEY, B VARCHAR)"));
+        conn.createStatement().execute("UPSERT INTO " + tableName +
+                " (A, B) VALUES(10, 'test1')");
+        conn.createStatement().execute("UPSERT INTO " + tableName +
+                " (A, B, DYN1 INTEGER) VALUES(90, 'test2', 3)");
+        conn.createStatement().execute("UPSERT INTO " + tableName +
+                " (A, DYN1 INTEGER, DYN2 VARCHAR) VALUES(100, 5, 'test3')");
+
+        ResultSet rs = conn.createStatement().executeQuery("SELECT * FROM " + tableName);
+        int rsCounter = 0;
+        while(rs.next()) {
+            ResultSetMetaData rmd = rs.getMetaData();
+            int count = rmd.getColumnCount();
+            assertEquals(rsCounter == 0 ? 2 : rsCounter == 1 ? 3 : 4,
+                    count);
+            for (int i = 1; i <= count; i++) {
+                if (rsCounter == 0) {
+                    if (i == 1) {
+                        assertEquals("A", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(10, rs.getObject(i));
+                    } else if (i == 2) {
+                        assertEquals("B", rmd.getColumnName(i));
+                        assertEquals(VARCHAR, rmd.getColumnType(i));
+                        assertEquals("test1", rs.getObject(i));
+                    }
+                } else if (rsCounter == 1) {
+                    if (i == 1) {
+                        assertEquals("A", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(90, rs.getObject(i));
+                    } else if (i == 2) {
+                        assertEquals("B", rmd.getColumnName(i));
+                        assertEquals(VARCHAR, rmd.getColumnType(i));
+                        assertEquals("test2", rs.getObject(i));
+                    } else if (i == 3) {
+                        assertEquals("DYN1", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(3, rs.getObject(i));
+                    }
+                } else if (rsCounter == 2) {
+                    if (i == 1) {
+                        assertEquals("A", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(100, rs.getObject(i));
+                    } else if (i == 2) {
+                        assertEquals("B", rmd.getColumnName(i));
+                        assertEquals(VARCHAR, rmd.getColumnType(i));
+                        // Note that we didn't upsert any value for column 'B' so we should get null
+                        assertEquals(null, rs.getObject(i));
+                    } else if (i == 3) {
+                        assertEquals("DYN1", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(5, rs.getObject(i));
+                    } else if (i == 4) {
+                        assertEquals("DYN2", rmd.getColumnName(i));
+                        assertEquals(VARCHAR, rmd.getColumnType(i));
+                        assertEquals("test3", rs.getObject(i));
+                    }
+                }
+            }
+            rsCounter++;
+        }
+    }
+
+    @Test
+    // Test the case where the table DDL contains just the composite key and no other columns
+    public void testCompositeKeyWithDynamicColumns() throws SQLException {
+        Connection conn = DriverManager.getConnection(getUrl());
+        conn.setAutoCommit(true);
+        String tableName = generateUniqueName();
+
+        conn.createStatement().execute(generateTableCreateDDL(tableName,
+                " (A INTEGER NOT NULL, B INTEGER NOT NULL CONSTRAINT PK PRIMARY KEY (A, B))"));
+        conn.createStatement().execute("UPSERT INTO " + tableName +
+                " (A, B) VALUES(10, 500)");
+        conn.createStatement().execute("UPSERT INTO " + tableName +
+                " (A, B, DYN1 INTEGER) VALUES(90, 100, 3)");
+        conn.createStatement().execute("UPSERT INTO " + tableName +
+                " (A, B, DYN2 VARCHAR) VALUES(999, 50, 'test1')");
+
+        ResultSet rs = conn.createStatement().executeQuery("SELECT * FROM " + tableName);
+        int rsCounter = 0;
+        while(rs.next()) {
+            ResultSetMetaData rmd = rs.getMetaData();
+            int count = rmd.getColumnCount();
+            assertEquals(rsCounter == 0 ? 2 : 3, count);
+            for (int i = 1; i <= count; i++) {
+                if (rsCounter == 0) {
+                    if (i == 1) {
+                        assertEquals("A", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(10, rs.getObject(i));
+                    } else if (i == 2) {
+                        assertEquals("B", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(500, rs.getObject(i));
+                    }
+                } else if (rsCounter == 1) {
+                    if (i == 1) {
+                        assertEquals("A", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(90, rs.getObject(i));
+                    } else if (i == 2) {
+                        assertEquals("B", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(100, rs.getObject(i));
+                    } else if (i == 3) {
+                        assertEquals("DYN1", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(3, rs.getObject(i));
+                    }
+                } else if (rsCounter == 2) {
+                    if (i == 1) {
+                        assertEquals("A", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(999, rs.getObject(i));
+                    } else if (i == 2) {
+                        assertEquals("B", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(50, rs.getObject(i));
+                    } else if (i == 3) {
+                        assertEquals("DYN2", rmd.getColumnName(i));
+                        assertEquals(VARCHAR, rmd.getColumnType(i));
+                        assertEquals("test1", rs.getObject(i));
+                    }
+                }
+            }
+            rsCounter++;
+        }
+    }
+
+    @Test
+    // Test the case where the table DDL contains the composite key and other columns
+    public void testCompositeKeyAndOtherColsWithDynamicColumns() throws SQLException {
+        Connection conn = DriverManager.getConnection(getUrl());
+        conn.setAutoCommit(true);
+        String tableName = generateUniqueName();
+
+        conn.createStatement().execute(generateTableCreateDDL(tableName,
+                " (A INTEGER NOT NULL, B INTEGER NOT NULL, C VARCHAR" +
+                " CONSTRAINT PK PRIMARY KEY (A, B))"));
+        conn.createStatement().execute("UPSERT INTO " + tableName +
+                " (A, B) VALUES(10, 500)");
+        conn.createStatement().execute("UPSERT INTO " + tableName +
+                " (A, B, C) VALUES(20, 7, 'test1')");
+        conn.createStatement().execute("UPSERT INTO " + tableName +
+                " (A, B, DYN1 INTEGER) VALUES(30, 100, 3)");
+        conn.createStatement().execute("UPSERT INTO " + tableName +
+                " (A, B, C, DYN2 VARCHAR, DYN3 BIGINT) VALUES(40, 60, 'test1', 'test2', 8)");
+
+        ResultSet rs = conn.createStatement().executeQuery("SELECT * FROM " + tableName);
+        int rsCounter = 0;
+        while(rs.next()) {
+            ResultSetMetaData rmd = rs.getMetaData();
+            int count = rmd.getColumnCount();
+            assertEquals(rsCounter <= 1 ?
+                    3 : rsCounter == 2 ? 4 : 5, count);
+            for (int i = 1; i <= count; i++) {
+                if (rsCounter == 0) {
+                    if (i == 1) {
+                        assertEquals("A", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(10, rs.getObject(i));
+                    } else if (i == 2) {
+                        assertEquals("B", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(500, rs.getObject(i));
+                    } else if (i == 3) {
+                        assertEquals("C", rmd.getColumnName(i));
+                        assertEquals(VARCHAR, rmd.getColumnType(i));
+                        assertEquals(null, rs.getObject(i));
+                    }
+                } else if (rsCounter == 1) {
+                    if (i == 1) {
+                        assertEquals("A", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(20, rs.getObject(i));
+                    } else if (i == 2) {
+                        assertEquals("B", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(7, rs.getObject(i));
+                    } else if (i == 3) {
+                        assertEquals("C", rmd.getColumnName(i));
+                        assertEquals(VARCHAR, rmd.getColumnType(i));
+                        assertEquals("test1", rs.getObject(i));
+                    }
+                } else if (rsCounter == 2) {
+                    if (i == 1) {
+                        assertEquals("A", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(30, rs.getObject(i));
+                    } else if (i == 2) {
+                        assertEquals("B", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(100, rs.getObject(i));
+                    } else if (i == 3) {
+                        assertEquals("C", rmd.getColumnName(i));
+                        assertEquals(VARCHAR, rmd.getColumnType(i));
+                        assertEquals(null, rs.getObject(i));
+                    } else if (i == 4) {
+                        assertEquals("DYN1", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(3, rs.getObject(i));
+                    }
+                } else if (rsCounter == 3) {
+                    if (i == 1) {
+                        assertEquals("A", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(40, rs.getObject(i));
+                    } else if (i == 2) {
+                        assertEquals("B", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(60, rs.getObject(i));
+                    } else if (i == 3) {
+                        assertEquals("C", rmd.getColumnName(i));
+                        assertEquals(VARCHAR, rmd.getColumnType(i));
+                        assertEquals("test1", rs.getObject(i));
+                    } else if (i == 4) {
+                        assertEquals("DYN2", rmd.getColumnName(i));
+                        assertEquals(VARCHAR, rmd.getColumnType(i));
+                        assertEquals("test2", rs.getObject(i));
+                    } else if (i == 5) {
+                        assertEquals("DYN3", rmd.getColumnName(i));
+                        assertEquals(BIGINT, rmd.getColumnType(i));
+                        assertEquals(8L, rs.getObject(i));
+                    }
+                }
+            }
+            rsCounter++;
+        }
+    }
+
+    @Test
+    // Test if dynamic columns are properly exposed in column family wildcard queries
+    public void testColumnFamilyWildcards() throws SQLException {
+        Connection conn = DriverManager.getConnection(getUrl());
+        conn.setAutoCommit(true);
+        String tableName = generateUniqueName();
+
+        conn.createStatement().execute(generateTableCreateDDL(tableName,
+                " (A INTEGER PRIMARY KEY, B VARCHAR, CF1.C INTEGER, CF2.D VARCHAR)"));
+        conn.createStatement().execute("UPSERT INTO " + tableName +
+                " (A, B, C, D) VALUES(10, 'test1', 2, 'test2')");
+        conn.createStatement().execute("UPSERT INTO " + tableName +
+                " (A, B, C, D, DYN0 INTEGER) VALUES(20, 'test3', 4, 'test4', 100)");
+        conn.createStatement().execute("UPSERT INTO " + tableName +
+                " (A, B, C, D, CF1.DYN1 VARCHAR, CF1.DYN2 INTEGER)" +
+                " VALUES(30, 'test5', 5, 'test6', 'test7', 70)");
+        conn.createStatement().execute("UPSERT INTO " + tableName +
+                " (A, B, C, D, CF2.DYN1 VARCHAR, CF2.DYN2 INTEGER)" +
+                " VALUES(40, 'test8', 6, 'test9', 'test10', 80)");
+        conn.createStatement().execute("UPSERT INTO " + tableName +
+                " (A, B, C, D, CF1.DYN3 VARCHAR, CF2.DYN4 INTEGER)" +
+                " VALUES(50, 'test11', 7, 'test12', 'test13', 90)");
+
+        ResultSet rs = conn.createStatement().executeQuery("SELECT CF1.* FROM " + tableName);
+        int rsCounter = 0;
+        while(rs.next()) {
+            ResultSetMetaData rmd = rs.getMetaData();
+            int count = rmd.getColumnCount();
+            assertEquals(rsCounter <= 1 || rsCounter == 3 ?
+                    1 : rsCounter == 2 ? 3 : 2, count);
+            for (int i = 1; i <= count; i++) {
+                if (rsCounter == 0) {
+                    if (i == 1) {
+                        assertEquals("C", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(2, rs.getObject(i));
+                    }
+                } else if (rsCounter == 1) {
+                    if (i == 1) {
+                        assertEquals("C", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(4, rs.getObject(i));
+                    }
+                } else if (rsCounter == 2) {
+                    if (i == 1) {
+                        assertEquals("C", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(5, rs.getObject(i));
+                    } else if (i == 2) {
+                        assertEquals("DYN1", rmd.getColumnName(i));
+                        assertEquals(VARCHAR, rmd.getColumnType(i));
+                        assertEquals("test7", rs.getObject(i));
+                    } else if (i == 3) {
+                        assertEquals("DYN2", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(70, rs.getObject(i));
+                    }
+                } else if (rsCounter == 3) {
+                    if (i == 1) {
+                        assertEquals("C", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(6, rs.getObject(i));
+                    }
+                } else if (rsCounter == 4) {
+                    if (i == 1) {
+                        assertEquals("C", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(7, rs.getObject(i));
+                    } else if (i == 2) {
+                        assertEquals("DYN3", rmd.getColumnName(i));
+                        assertEquals(VARCHAR, rmd.getColumnType(i));
+                        assertEquals("test13", rs.getObject(i));
+                    }
+                }
+            }
+            rsCounter++;
+        }
+
+        rs = conn.createStatement().executeQuery("SELECT CF2.* FROM " + tableName);
+        rsCounter = 0;
+        while(rs.next()) {
+            ResultSetMetaData rmd = rs.getMetaData();
+            int count = rmd.getColumnCount();
+            assertEquals(rsCounter <= 2 ?
+                    1 : rsCounter == 3 ? 3 : 2, count);
+            for (int i = 1; i <= count; i++) {
+                if (rsCounter == 0) {
+                    if (i == 1) {
+                        assertEquals("D", rmd.getColumnName(i));
+                        assertEquals(VARCHAR, rmd.getColumnType(i));
+                        assertEquals("test2", rs.getObject(i));
+                    }
+                } else if (rsCounter == 1) {
+                    if (i == 1) {
+                        assertEquals("D", rmd.getColumnName(i));
+                        assertEquals(VARCHAR, rmd.getColumnType(i));
+                        assertEquals("test4", rs.getObject(i));
+                    }
+                } else if (rsCounter == 2) {
+                    if (i == 1) {
+                        assertEquals("D", rmd.getColumnName(i));
+                        assertEquals(VARCHAR, rmd.getColumnType(i));
+                        assertEquals("test6", rs.getObject(i));
+                    }
+                } else if (rsCounter == 3) {
+                    if (i == 1) {
+                        assertEquals("D", rmd.getColumnName(i));
+                        assertEquals(VARCHAR, rmd.getColumnType(i));
+                        assertEquals("test9", rs.getObject(i));
+                    } else if (i == 2) {
+                        assertEquals("DYN1", rmd.getColumnName(i));
+                        assertEquals(VARCHAR, rmd.getColumnType(i));
+                        assertEquals("test10", rs.getObject(i));
+                    } else if (i == 3) {
+                        assertEquals("DYN2", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(80, rs.getObject(i));
+                    }
+                } else if (rsCounter == 4) {
+                    if (i == 1) {
+                        assertEquals("D", rmd.getColumnName(i));
+                        assertEquals(VARCHAR, rmd.getColumnType(i));
+                        assertEquals("test12", rs.getObject(i));
+                    } else if (i == 2) {
+                        assertEquals("DYN4", rmd.getColumnName(i));
+                        assertEquals(INTEGER, rmd.getColumnType(i));
+                        assertEquals(90, rs.getObject(i));
+                    }
+                }
+            }
+            rsCounter++;
+        }
+
+    }
+
+}
index 1b97b50..51a5294 100644 (file)
@@ -17,6 +17,9 @@
  */
 package org.apache.phoenix.compile;
 
+import static org.apache.phoenix.query.QueryServices.WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB;
+import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB;
+
 import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
@@ -110,9 +113,13 @@ public class ProjectionCompiler {
     }
     
    /**
     * Builds the projection for the scan for a plain SELECT (no UPSERT target columns).
     * Reads the client configuration to decide whether wildcard ('*' / 'cf.*') projections
     * should also surface dynamic columns (PHOENIX-374), and delegates to the full overload.
     */
    public static RowProjector compile(StatementContext context, SelectStatement statement, GroupBy groupBy) throws SQLException  {
        // Client-level config flag controlling whether wildcard queries expose dynamic columns
        boolean wildcardIncludesDynamicCols = context.getConnection().getQueryServices()
                .getConfiguration().getBoolean(WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB,
                        DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB);
        return compile(context, statement, groupBy, Collections.<PColumn>emptyList(),
                // Pass null expression because we don't want empty key value to be projected
                NULL_EXPRESSION,
                wildcardIncludesDynamicCols);
    }
     
     private static int getMinPKOffset(PTable table, PName tenantId) {
@@ -337,23 +344,29 @@ public class ProjectionCompiler {
     /**
      * Builds the projection for the scan
      * @param context query context kept between compilation of different query clauses
-     * @param statement TODO
+     * @param statement the statement being compiled
      * @param groupBy compiled GROUP BY clause
      * @param targetColumns list of columns, parallel to aliasedNodes, that are being set for an
      * UPSERT SELECT statement. Used to coerce expression types to the expected target type.
+     * @param where the where clause expression
+     * @param wildcardIncludesDynamicCols true if wildcard queries should include dynamic columns
      * @return projector used to access row values during scan
      * @throws SQLException 
      */
-    public static RowProjector compile(StatementContext context, SelectStatement statement, GroupBy groupBy, List<? extends PDatum> targetColumns, Expression where) throws SQLException {
-        List<KeyValueColumnExpression> arrayKVRefs = new ArrayList<KeyValueColumnExpression>();
-        List<ProjectedColumnExpression> arrayProjectedColumnRefs = new ArrayList<ProjectedColumnExpression>();
-        List<Expression> arrayKVFuncs = new ArrayList<Expression>();
-        List<Expression> arrayOldFuncs = new ArrayList<Expression>();
+    public static RowProjector compile(StatementContext context, SelectStatement statement,
+            GroupBy groupBy, List<? extends PDatum> targetColumns, Expression where,
+            boolean wildcardIncludesDynamicCols) throws SQLException {
+        List<KeyValueColumnExpression> arrayKVRefs = new ArrayList<>();
+        List<ProjectedColumnExpression> arrayProjectedColumnRefs = new ArrayList<>();
+        List<Expression> arrayKVFuncs = new ArrayList<>();
+        List<Expression> arrayOldFuncs = new ArrayList<>();
         Map<Expression, Integer> arrayExpressionCounts = new HashMap<>();
         List<AliasedNode> aliasedNodes = statement.getSelect();
         // Setup projected columns in Scan
-        SelectClauseVisitor selectVisitor = new SelectClauseVisitor(context, groupBy, arrayKVRefs, arrayKVFuncs, arrayExpressionCounts, arrayProjectedColumnRefs, arrayOldFuncs, statement);
-        List<ExpressionProjector> projectedColumns = new ArrayList<ExpressionProjector>();
+        SelectClauseVisitor selectVisitor = new SelectClauseVisitor(context, groupBy, arrayKVRefs,
+                arrayKVFuncs, arrayExpressionCounts, arrayProjectedColumnRefs, arrayOldFuncs,
+                statement);
+        List<ExpressionProjector> projectedColumns = new ArrayList<>();
         ColumnResolver resolver = context.getResolver();
         TableRef tableRef = context.getCurrentTable();
         PTable table = tableRef.getTable();
@@ -468,7 +481,9 @@ public class ProjectionCompiler {
         }
 
         boolean isProjectEmptyKeyValue = false;
-        if (isWildcard) {
+        // Don't project known/declared column families into the scan if we want to support
+        // surfacing dynamic columns in wildcard queries
+        if (isWildcard && !wildcardIncludesDynamicCols) {
             projectAllColumnFamilies(table, scan);
         } else {
             isProjectEmptyKeyValue = where == null || LiteralExpression.isTrue(where) || where.requiresFinalEvaluation();
@@ -501,7 +516,9 @@ public class ProjectionCompiler {
                 // Ignore as this can happen for local indexes when the data table has a column family, but there are no covered columns in the family
             }
         }
-        return new RowProjector(projectedColumns, Math.max(estimatedKeySize, estimatedByteSize), isProjectEmptyKeyValue, resolver.hasUDFs(), isWildcard);
+        return new RowProjector(projectedColumns, Math.max(estimatedKeySize, estimatedByteSize),
+                isProjectEmptyKeyValue, resolver.hasUDFs(), isWildcard,
+                wildcardIncludesDynamicCols);
     }
 
     private static void projectAllColumnFamilies(PTable table, Scan scan) {
index 15cdc10..d773273 100644 (file)
@@ -17,6 +17,9 @@
  */
 package org.apache.phoenix.compile;
 
+import static org.apache.phoenix.query.QueryServices.WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB;
+import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB;
+
 import java.sql.SQLException;
 import java.sql.SQLFeatureNotSupportedException;
 import java.util.ArrayList;
@@ -50,7 +53,6 @@ import org.apache.phoenix.expression.RowValueConstructorExpression;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.apache.phoenix.iterate.ParallelIteratorFactory;
 import org.apache.phoenix.jdbc.PhoenixConnection;
-import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
 import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.join.HashJoinInfo;
 import org.apache.phoenix.optimize.Cost;
@@ -213,7 +215,11 @@ public class QueryCompiler {
                 context.setCurrentTable(table.getTableRef());
                 PTable projectedTable = table.createProjectedTable(!projectPKColumns, context);
                 TupleProjector projector = new TupleProjector(projectedTable);
-                TupleProjector.serializeProjectorIntoScan(context.getScan(), projector);
+                boolean wildcardIncludesDynamicCols = context.getConnection().getQueryServices()
+                        .getConfiguration().getBoolean(WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB,
+                                DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB);
+                TupleProjector.serializeProjectorIntoScan(context.getScan(), projector,
+                        wildcardIncludesDynamicCols);
                 context.setResolver(FromCompiler.getResolverForProjectedTable(projectedTable, context.getConnection(), subquery.getUdfParseNodes()));
                 table.projectColumns(context.getScan());
                 return compileSingleFlatQuery(context, subquery, binds, asSubquery, !asSubquery, null, projectPKColumns ? projector : null, true);
@@ -252,6 +258,9 @@ public class QueryCompiler {
     protected QueryPlan compileJoinQuery(JoinCompiler.Strategy strategy, StatementContext context, List<Object> binds, JoinTable joinTable, boolean asSubquery, boolean projectPKColumns, List<OrderByNode> orderBy) throws SQLException {
         byte[] emptyByteArray = new byte[0];
         List<JoinSpec> joinSpecs = joinTable.getJoinSpecs();
+        boolean wildcardIncludesDynamicCols = context.getConnection().getQueryServices()
+                .getConfiguration().getBoolean(WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB,
+                        DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB);
         switch (strategy) {
             case HASH_BUILD_RIGHT: {
                 boolean[] starJoinVector = joinTable.getStarJoinVector();
@@ -318,7 +327,8 @@ public class QueryCompiler {
                     }
                     hashPlans[i] = new HashSubPlan(i, subPlans[i], optimized ? null : hashExpressions, joinSpec.isSingleValueOnly(), usePersistentCache, keyRangeLhsExpression, keyRangeRhsExpression);
                 }
-                TupleProjector.serializeProjectorIntoScan(context.getScan(), tupleProjector);
+                TupleProjector.serializeProjectorIntoScan(context.getScan(), tupleProjector,
+                        wildcardIncludesDynamicCols);
                 QueryPlan plan = compileSingleFlatQuery(context, query, binds, asSubquery, !asSubquery && joinTable.isAllLeftJoin(), null, !table.isSubselect() && projectPKColumns ? tupleProjector : null, true);
                 Expression postJoinFilterExpression = joinTable.compilePostFilterExpression(context);
                 Integer limit = null;
@@ -370,7 +380,8 @@ public class QueryCompiler {
                 PTable lhsTable = needsMerge ? lhsCtx.getResolver().getTables().get(0).getTable() : null;
                 int fieldPosition = needsMerge ? rhsProjTable.getColumns().size() - rhsProjTable.getPKColumns().size() : 0;
                 PTable projectedTable = needsMerge ? JoinCompiler.joinProjectedTables(rhsProjTable, lhsTable, type == JoinType.Right ? JoinType.Left : type) : rhsProjTable;
-                TupleProjector.serializeProjectorIntoScan(context.getScan(), tupleProjector);
+                TupleProjector.serializeProjectorIntoScan(context.getScan(), tupleProjector,
+                        wildcardIncludesDynamicCols);
                 context.setResolver(FromCompiler.getResolverForProjectedTable(projectedTable, context.getConnection(), rhs.getUdfParseNodes()));
                 QueryPlan rhsPlan = compileSingleFlatQuery(context, rhs, binds, asSubquery, !asSubquery && type == JoinType.Right, null, !rhsTable.isSubselect() && projectPKColumns ? tupleProjector : null, true);
                 Expression postJoinFilterExpression = joinTable.compilePostFilterExpression(context);
@@ -561,7 +572,12 @@ public class QueryCompiler {
         // definitively whether or not we'll traverse in row key order.
         groupBy = groupBy.compile(context, innerPlanTupleProjector);
         context.setResolver(resolver); // recover resolver
-        RowProjector projector = ProjectionCompiler.compile(context, select, groupBy, asSubquery ? Collections.<PDatum>emptyList() : targetColumns, where);
+        boolean wildcardIncludesDynamicCols = context.getConnection().getQueryServices()
+                .getConfiguration().getBoolean(WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB,
+                        DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB);
+        RowProjector projector = ProjectionCompiler.compile(context, select, groupBy,
+                asSubquery ? Collections.emptyList() : targetColumns, where,
+                wildcardIncludesDynamicCols);
         OrderBy orderBy = OrderByCompiler.compile(
                 context,
                 select,
@@ -586,7 +602,9 @@ public class QueryCompiler {
         }
 
         if (projectedTable != null) {
-            TupleProjector.serializeProjectorIntoScan(context.getScan(), new TupleProjector(projectedTable));
+            TupleProjector.serializeProjectorIntoScan(context.getScan(),
+                    new TupleProjector(projectedTable), wildcardIncludesDynamicCols &&
+                            projector.projectDynColsInWildcardQueries());
         }
         
         QueryPlan plan = innerPlan;
index 8532e0c..356e7a3 100644 (file)
@@ -52,9 +52,12 @@ public class RowProjector {
     private final boolean isProjectEmptyKeyValue;
     private final boolean cloneRequired;
     private final boolean hasUDFs;
-    
+    private final boolean isProjectDynColsInWildcardQueries;
+
     public RowProjector(RowProjector projector, boolean isProjectEmptyKeyValue) {
-        this(projector.getColumnProjectors(), projector.getEstimatedRowByteSize(), isProjectEmptyKeyValue, projector.hasUDFs, projector.isProjectAll);
+        this(projector.getColumnProjectors(), projector.getEstimatedRowByteSize(),
+                isProjectEmptyKeyValue, projector.hasUDFs, projector.isProjectAll,
+                projector.isProjectDynColsInWildcardQueries);
     }
     /**
      * Construct RowProjector based on a list of ColumnProjectors.
@@ -62,20 +65,25 @@ public class RowProjector {
      * aggregating coprocessor. Only required in the case of an aggregate query with a limit clause and otherwise may
      * be null.
      * @param estimatedRowSize 
+     * @param isProjectEmptyKeyValue
      */
     public RowProjector(List<? extends ColumnProjector> columnProjectors, int estimatedRowSize, boolean isProjectEmptyKeyValue) {
-        this(columnProjectors, estimatedRowSize, isProjectEmptyKeyValue, false, false);
+        this(columnProjectors, estimatedRowSize, isProjectEmptyKeyValue, false, false, false);
     }
     /**
      * Construct RowProjector based on a list of ColumnProjectors.
      * @param columnProjectors ordered list of ColumnProjectors corresponding to projected columns in SELECT clause
      * aggregating coprocessor. Only required in the case of an aggregate query with a limit clause and otherwise may
      * be null.
-     * @param estimatedRowSize 
+     * @param estimatedRowSize
      * @param isProjectEmptyKeyValue
      * @param hasUDFs
+     * @param isProjectAll
+     * @param isProjectDynColsInWildcardQueries
      */
-    public RowProjector(List<? extends ColumnProjector> columnProjectors, int estimatedRowSize, boolean isProjectEmptyKeyValue, boolean hasUDFs, boolean isProjectAll) {
+    public RowProjector(List<? extends ColumnProjector> columnProjectors, int estimatedRowSize,
+            boolean isProjectEmptyKeyValue, boolean hasUDFs, boolean isProjectAll,
+            boolean isProjectDynColsInWildcardQueries) {
         this.columnProjectors = Collections.unmodifiableList(columnProjectors);
         int position = columnProjectors.size();
         reverseIndex = ArrayListMultimap.<String, Integer>create();
@@ -107,6 +115,7 @@ public class RowProjector {
             }
         }
         this.cloneRequired = cloneRequired || hasUDFs;
+        this.isProjectDynColsInWildcardQueries = isProjectDynColsInWildcardQueries;
     }
 
     public RowProjector cloneIfNecessary() {
@@ -129,7 +138,8 @@ public class RowProjector {
             }
         }
         return new RowProjector(clonedColProjectors, 
-                this.estimatedSize, this.isProjectEmptyKeyValue, this.hasUDFs, this.isProjectAll);
+                this.estimatedSize, this.isProjectEmptyKeyValue, this.hasUDFs, this.isProjectAll,
+                this.isProjectDynColsInWildcardQueries);
     }
 
     public boolean projectEveryRow() {
@@ -139,6 +149,14 @@ public class RowProjector {
     public boolean projectEverything() {
         return isProjectAll;
     }
+
+    public boolean hasUDFs() {
+        return hasUDFs;
+    }
+
+    public boolean projectDynColsInWildcardQueries() {
+        return isProjectDynColsInWildcardQueries;
+    }
     
     public List<? extends ColumnProjector> getColumnProjectors() {
         return columnProjectors;
index dab2457..065e0cb 100644 (file)
@@ -167,7 +167,6 @@ public class TupleProjectionCompiler {
                 .setBaseColumnCount(BASE_TABLE_BASE_COLUMN_COUNT)
                 .setExcludedColumns(ImmutableList.of())
                 .setPhysicalNames(ImmutableList.of())
-                .setColumns(projectedColumns)
                 .build();
     }
     
index 7ec96a3..0d65ae2 100644 (file)
@@ -728,7 +728,6 @@ public class UpsertCompiler {
                     PTable projectedTable = PTableImpl.builderWithColumns(table, projectedColumns)
                             .setExcludedColumns(ImmutableList.of())
                             .setDefaultFamilyName(PNameFactory.newName(SchemaUtil.getEmptyColumnFamily(table)))
-                            .setColumns(projectedColumns)
                             .build();
                     
                     SelectStatement select = SelectStatement.create(SelectStatement.COUNT_ONE, upsert.getHint());
index 3489b79..404a7ad 100644 (file)
@@ -364,14 +364,14 @@ public class HashJoinRegionScanner implements RegionScanner {
 
         if (dest instanceof ProjectedValueTuple) {
             return TupleProjector.mergeProjectedValue(
-                (ProjectedValueTuple) dest, destSchema, destBitSet, src,
-                srcSchema, srcBitSet, offset, useNewValueColumnQualifier);
+                (ProjectedValueTuple) dest, destBitSet, src,
+                srcBitSet, offset, useNewValueColumnQualifier);
         }
 
         ProjectedValueTuple first = projector.projectResults(
             new SingleKeyValueTuple(dest.getValue(0)));
         ProjectedValueTuple merged = TupleProjector.mergeProjectedValue(
-            first, destSchema, destBitSet, src, srcSchema,
+            first, destBitSet, src,
             srcBitSet, offset, useNewValueColumnQualifier);
 
         int size = dest.size();
index 334bcfa..ae2a6fd 100644 (file)
@@ -21,20 +21,33 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.List;
+import java.util.NavigableMap;
 import java.util.Optional;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.RegionObserver;
+import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.WritableUtils;
+import org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos;
+import org.apache.phoenix.coprocessor.generated.PTableProtos;
 import org.apache.phoenix.expression.OrderByExpression;
 import org.apache.phoenix.iterate.NonAggregateRegionScannerFactory;
-import org.apache.phoenix.schema.PTable;
-import org.apache.phoenix.util.EncodedColumnsUtil;
+import org.apache.phoenix.schema.PColumn;
+import org.apache.phoenix.schema.PColumnImpl;
 import org.apache.phoenix.util.ScanUtil;
+import org.apache.phoenix.util.ServerUtil;
+
+import static org.apache.phoenix.schema.types.PDataType.TRUE_BYTES;
 
 /**
  *
@@ -47,7 +60,15 @@ import org.apache.phoenix.util.ScanUtil;
  * @since 0.1
  */
 public class ScanRegionObserver extends BaseScannerRegionObserver implements RegionCoprocessor {
-    
+
+    private static final Log LOG = LogFactory.getLog(ScanRegionObserver.class);
+    public static final byte[] DYN_COLS_METADATA_CELL_QUALIFIER = Bytes.toBytes("D#");
+    public static final String DYNAMIC_COLUMN_METADATA_STORED_FOR_MUTATION =
+            "_DynColsMetadataStoredForMutation";
+    // Scan attribute that is set in case we want to project dynamic columns
+    public static final String WILDCARD_SCAN_INCLUDES_DYNAMIC_COLUMNS =
+            "_WildcardScanIncludesDynCols";
+
     @Override
     public Optional<RegionObserver> getRegionObserver() {
       return Optional.of(this);
@@ -77,6 +98,97 @@ public class ScanRegionObserver extends BaseScannerRegionObserver implements Reg
     }
 
     @Override
+    public void preBatchMutate(ObserverContext<RegionCoprocessorEnvironment> c,
+            MiniBatchOperationInProgress<Mutation> miniBatchOp) throws IOException {
+        try {
+            preBatchMutateWithExceptions(miniBatchOp, c.getEnvironment().getRegion()
+                    .getTableDescriptor().getTableName().getNameAsString());
+        } catch(Throwable t) {
+            // Wrap all exceptions in an IOException to prevent region server crashes
+            throw ServerUtil.createIOException("Unable to Put cells corresponding to dynamic " +
+                    "column metadata for " +
+                    c.getEnvironment().getRegion().getRegionInfo().getTable().getNameAsString(), t);
+        }
+    }
+
+    /**
+     * If surfacing dynamic columns for wildcard queries is enabled via the client-side config
+     * {@link org.apache.phoenix.query.QueryServices#WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB},
+     * attributes were previously set on each Put mutation, where the key is the column family
+     * name and the value is the serialized list of dynamic columns in that family.
+     * Here we iterate over all Put mutations and add metadata for the list of dynamic columns for
+     * each column family in its own cell under reserved qualifiers. See PHOENIX-374
+     * @param miniBatchOp batch of mutations getting applied to region
+     * @param tableName Name of table served by region
+     * @throws IOException If an I/O error occurs when parsing protobuf
+     */
+    private void preBatchMutateWithExceptions(MiniBatchOperationInProgress<Mutation> miniBatchOp,
+            String tableName)
+    throws IOException {
+        for (int i = 0; i < miniBatchOp.size(); i++) {
+            Mutation m = miniBatchOp.getOperation(i);
+            // There is at max 1 extra Put (for dynamic column shadow cells) per original Put
+            Put dynColShadowCellsPut = null;
+            if (m instanceof Put && Bytes.equals(m.getAttribute(
+                    DYNAMIC_COLUMN_METADATA_STORED_FOR_MUTATION), TRUE_BYTES)) {
+                if (LOG.isDebugEnabled()) {
+                    LOG.debug("Adding dynamic column metadata for table: " + tableName + ". Put: " +
+                            m.toString());
+                }
+                NavigableMap<byte[], List<Cell>> famCellMap = m.getFamilyCellMap();
+                for (byte[] fam : famCellMap.keySet()) {
+                    byte[] serializedDynColsList = m.getAttribute(Bytes.toString(fam));
+                    if (serializedDynColsList == null) {
+                        // There are no dynamic columns for this column family
+                        continue;
+                    }
+                    List<PTableProtos.PColumn> dynColsInThisFam = DynamicColumnMetaDataProtos.
+                            DynamicColumnMetaData.parseFrom(serializedDynColsList)
+                            .getDynamicColumnsList();
+                    if (dynColsInThisFam.isEmpty()) {
+                        continue;
+                    }
+                    if (dynColShadowCellsPut == null) {
+                        dynColShadowCellsPut = new Put(m.getRow());
+                    }
+                    for (PTableProtos.PColumn dynColProto : dynColsInThisFam) {
+                        // Add a column for this dynamic column to the metadata Put operation
+                        dynColShadowCellsPut.addColumn(fam,
+                                getQualifierForDynamicColumnMetaDataCell(dynColProto),
+                                dynColProto.toByteArray());
+                    }
+                }
+            }
+            if (dynColShadowCellsPut != null) {
+                miniBatchOp.addOperationsFromCP(i, new Mutation[]{dynColShadowCellsPut});
+            }
+        }
+    }
+
+    /**
+     * We store the metadata for each dynamic cell in a separate cell in the same column family.
+     * The column qualifier for this cell is:
+     * {@link ScanRegionObserver#DYN_COLS_METADATA_CELL_QUALIFIER} concatenated with the
+     * qualifier of the actual dynamic column
+     * @param dynColProto Protobuf representation of the dynamic column PColumn
+     * @return Final qualifier for the metadata cell
+     * @throws IOException If an I/O error occurs when parsing the byte array output stream
+     */
+    private static byte[] getQualifierForDynamicColumnMetaDataCell(PTableProtos.PColumn dynColProto)
+    throws IOException {
+        PColumn dynCol = PColumnImpl.createFromProto(dynColProto);
+        ByteArrayOutputStream qual = new ByteArrayOutputStream();
+        qual.write(DYN_COLS_METADATA_CELL_QUALIFIER);
+        qual.write(dynCol.getColumnQualifierBytes());
+        if (LOG.isTraceEnabled()) {
+            LOG.trace("Storing shadow cell for dynamic column metadata for dynamic column : " +
+                    dynCol.getFamilyName().getString() + "." + dynCol.getName().getString());
+        }
+        return qual.toByteArray();
+    }
+
+    @Override
     protected RegionScanner doPostScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c, final Scan scan, final RegionScanner s) throws Throwable {
         NonAggregateRegionScannerFactory nonAggregateROUtil = new NonAggregateRegionScannerFactory(c.getEnvironment());
         return nonAggregateROUtil.getRegionScanner(scan, s);
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/generated/DynamicColumnMetaDataProtos.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/generated/DynamicColumnMetaDataProtos.java
new file mode 100644 (file)
index 0000000..760f8f9
--- /dev/null
@@ -0,0 +1,774 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: DynamicColumnMetaData.proto
+
+package org.apache.phoenix.coprocessor.generated;
+
+public final class DynamicColumnMetaDataProtos {
+  private DynamicColumnMetaDataProtos() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface DynamicColumnMetaDataOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // repeated .PColumn dynamicColumns = 1;
+    /**
+     * <code>repeated .PColumn dynamicColumns = 1;</code>
+     */
+    java.util.List<org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn> 
+        getDynamicColumnsList();
+    /**
+     * <code>repeated .PColumn dynamicColumns = 1;</code>
+     */
+    org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn getDynamicColumns(int index);
+    /**
+     * <code>repeated .PColumn dynamicColumns = 1;</code>
+     */
+    int getDynamicColumnsCount();
+    /**
+     * <code>repeated .PColumn dynamicColumns = 1;</code>
+     */
+    java.util.List<? extends org.apache.phoenix.coprocessor.generated.PTableProtos.PColumnOrBuilder> 
+        getDynamicColumnsOrBuilderList();
+    /**
+     * <code>repeated .PColumn dynamicColumns = 1;</code>
+     */
+    org.apache.phoenix.coprocessor.generated.PTableProtos.PColumnOrBuilder getDynamicColumnsOrBuilder(
+        int index);
+  }
+  /**
+   * Protobuf type {@code DynamicColumnMetaData}
+   */
+  public static final class DynamicColumnMetaData extends
+      com.google.protobuf.GeneratedMessage
+      implements DynamicColumnMetaDataOrBuilder {
+    // Use DynamicColumnMetaData.newBuilder() to construct.
+    private DynamicColumnMetaData(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private DynamicColumnMetaData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final DynamicColumnMetaData defaultInstance;
+    public static DynamicColumnMetaData getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public DynamicColumnMetaData getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private DynamicColumnMetaData(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+                dynamicColumns_ = new java.util.ArrayList<org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn>();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              dynamicColumns_.add(input.readMessage(org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.PARSER, extensionRegistry));
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+          dynamicColumns_ = java.util.Collections.unmodifiableList(dynamicColumns_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.internal_static_DynamicColumnMetaData_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.internal_static_DynamicColumnMetaData_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData.class, org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<DynamicColumnMetaData> PARSER =
+        new com.google.protobuf.AbstractParser<DynamicColumnMetaData>() {
+      public DynamicColumnMetaData parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new DynamicColumnMetaData(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<DynamicColumnMetaData> getParserForType() {
+      return PARSER;
+    }
+
+    // repeated .PColumn dynamicColumns = 1;
+    public static final int DYNAMICCOLUMNS_FIELD_NUMBER = 1;
+    private java.util.List<org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn> dynamicColumns_;
+    /**
+     * <code>repeated .PColumn dynamicColumns = 1;</code>
+     */
+    public java.util.List<org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn> getDynamicColumnsList() {
+      return dynamicColumns_;
+    }
+    /**
+     * <code>repeated .PColumn dynamicColumns = 1;</code>
+     */
+    public java.util.List<? extends org.apache.phoenix.coprocessor.generated.PTableProtos.PColumnOrBuilder> 
+        getDynamicColumnsOrBuilderList() {
+      return dynamicColumns_;
+    }
+    /**
+     * <code>repeated .PColumn dynamicColumns = 1;</code>
+     */
+    public int getDynamicColumnsCount() {
+      return dynamicColumns_.size();
+    }
+    /**
+     * <code>repeated .PColumn dynamicColumns = 1;</code>
+     */
+    public org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn getDynamicColumns(int index) {
+      return dynamicColumns_.get(index);
+    }
+    /**
+     * <code>repeated .PColumn dynamicColumns = 1;</code>
+     */
+    public org.apache.phoenix.coprocessor.generated.PTableProtos.PColumnOrBuilder getDynamicColumnsOrBuilder(
+        int index) {
+      return dynamicColumns_.get(index);
+    }
+
+    private void initFields() {
+      dynamicColumns_ = java.util.Collections.emptyList();
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      for (int i = 0; i < getDynamicColumnsCount(); i++) {
+        if (!getDynamicColumns(i).isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      for (int i = 0; i < dynamicColumns_.size(); i++) {
+        output.writeMessage(1, dynamicColumns_.get(i));
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      for (int i = 0; i < dynamicColumns_.size(); i++) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(1, dynamicColumns_.get(i));
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData)) {
+        return super.equals(obj);
+      }
+      org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData other = (org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData) obj;
+
+      boolean result = true;
+      result = result && getDynamicColumnsList()
+          .equals(other.getDynamicColumnsList());
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (getDynamicColumnsCount() > 0) {
+        hash = (37 * hash) + DYNAMICCOLUMNS_FIELD_NUMBER;
+        hash = (53 * hash) + getDynamicColumnsList().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code DynamicColumnMetaData}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaDataOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.internal_static_DynamicColumnMetaData_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.internal_static_DynamicColumnMetaData_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData.class, org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData.Builder.class);
+      }
+
+      // Construct using org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+          getDynamicColumnsFieldBuilder();
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        if (dynamicColumnsBuilder_ == null) {
+          dynamicColumns_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000001);
+        } else {
+          dynamicColumnsBuilder_.clear();
+        }
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.internal_static_DynamicColumnMetaData_descriptor;
+      }
+
+      public org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData getDefaultInstanceForType() {
+        return org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData.getDefaultInstance();
+      }
+
+      public org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData build() {
+        org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData buildPartial() {
+        org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData result = new org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData(this);
+        int from_bitField0_ = bitField0_;
+        if (dynamicColumnsBuilder_ == null) {
+          if (((bitField0_ & 0x00000001) == 0x00000001)) {
+            dynamicColumns_ = java.util.Collections.unmodifiableList(dynamicColumns_);
+            bitField0_ = (bitField0_ & ~0x00000001);
+          }
+          result.dynamicColumns_ = dynamicColumns_;
+        } else {
+          result.dynamicColumns_ = dynamicColumnsBuilder_.build();
+        }
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData) {
+          return mergeFrom((org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData other) {
+        if (other == org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData.getDefaultInstance()) return this;
+        if (dynamicColumnsBuilder_ == null) {
+          if (!other.dynamicColumns_.isEmpty()) {
+            if (dynamicColumns_.isEmpty()) {
+              dynamicColumns_ = other.dynamicColumns_;
+              bitField0_ = (bitField0_ & ~0x00000001);
+            } else {
+              ensureDynamicColumnsIsMutable();
+              dynamicColumns_.addAll(other.dynamicColumns_);
+            }
+            onChanged();
+          }
+        } else {
+          if (!other.dynamicColumns_.isEmpty()) {
+            if (dynamicColumnsBuilder_.isEmpty()) {
+              dynamicColumnsBuilder_.dispose();
+              dynamicColumnsBuilder_ = null;
+              dynamicColumns_ = other.dynamicColumns_;
+              bitField0_ = (bitField0_ & ~0x00000001);
+              dynamicColumnsBuilder_ = 
+                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+                   getDynamicColumnsFieldBuilder() : null;
+            } else {
+              dynamicColumnsBuilder_.addAllMessages(other.dynamicColumns_);
+            }
+          }
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        for (int i = 0; i < getDynamicColumnsCount(); i++) {
+          if (!getDynamicColumns(i).isInitialized()) {
+            
+            return false;
+          }
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos.DynamicColumnMetaData) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // repeated .PColumn dynamicColumns = 1;
+      private java.util.List<org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn> dynamicColumns_ =
+        java.util.Collections.emptyList();
+      private void ensureDynamicColumnsIsMutable() {
+        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+          dynamicColumns_ = new java.util.ArrayList<org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn>(dynamicColumns_);
+          bitField0_ |= 0x00000001;
+         }
+      }
+
+      private com.google.protobuf.RepeatedFieldBuilder<
+          org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.Builder, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumnOrBuilder> dynamicColumnsBuilder_;
+
+      /**
+       * <code>repeated .PColumn dynamicColumns = 1;</code>
+       */
+      public java.util.List<org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn> getDynamicColumnsList() {
+        if (dynamicColumnsBuilder_ == null) {
+          return java.util.Collections.unmodifiableList(dynamicColumns_);
+        } else {
+          return dynamicColumnsBuilder_.getMessageList();
+        }
+      }
+      /**
+       * <code>repeated .PColumn dynamicColumns = 1;</code>
+       */
+      public int getDynamicColumnsCount() {
+        if (dynamicColumnsBuilder_ == null) {
+          return dynamicColumns_.size();
+        } else {
+          return dynamicColumnsBuilder_.getCount();
+        }
+      }
+      /**
+       * <code>repeated .PColumn dynamicColumns = 1;</code>
+       */
+      public org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn getDynamicColumns(int index) {
+        if (dynamicColumnsBuilder_ == null) {
+          return dynamicColumns_.get(index);
+        } else {
+          return dynamicColumnsBuilder_.getMessage(index);
+        }
+      }
+      /**
+       * <code>repeated .PColumn dynamicColumns = 1;</code>
+       */
+      public Builder setDynamicColumns(
+          int index, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn value) {
+        if (dynamicColumnsBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureDynamicColumnsIsMutable();
+          dynamicColumns_.set(index, value);
+          onChanged();
+        } else {
+          dynamicColumnsBuilder_.setMessage(index, value);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .PColumn dynamicColumns = 1;</code>
+       */
+      public Builder setDynamicColumns(
+          int index, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.Builder builderForValue) {
+        if (dynamicColumnsBuilder_ == null) {
+          ensureDynamicColumnsIsMutable();
+          dynamicColumns_.set(index, builderForValue.build());
+          onChanged();
+        } else {
+          dynamicColumnsBuilder_.setMessage(index, builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .PColumn dynamicColumns = 1;</code>
+       */
+      public Builder addDynamicColumns(org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn value) {
+        if (dynamicColumnsBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureDynamicColumnsIsMutable();
+          dynamicColumns_.add(value);
+          onChanged();
+        } else {
+          dynamicColumnsBuilder_.addMessage(value);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .PColumn dynamicColumns = 1;</code>
+       */
+      public Builder addDynamicColumns(
+          int index, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn value) {
+        if (dynamicColumnsBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureDynamicColumnsIsMutable();
+          dynamicColumns_.add(index, value);
+          onChanged();
+        } else {
+          dynamicColumnsBuilder_.addMessage(index, value);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .PColumn dynamicColumns = 1;</code>
+       */
+      public Builder addDynamicColumns(
+          org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.Builder builderForValue) {
+        if (dynamicColumnsBuilder_ == null) {
+          ensureDynamicColumnsIsMutable();
+          dynamicColumns_.add(builderForValue.build());
+          onChanged();
+        } else {
+          dynamicColumnsBuilder_.addMessage(builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .PColumn dynamicColumns = 1;</code>
+       */
+      public Builder addDynamicColumns(
+          int index, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.Builder builderForValue) {
+        if (dynamicColumnsBuilder_ == null) {
+          ensureDynamicColumnsIsMutable();
+          dynamicColumns_.add(index, builderForValue.build());
+          onChanged();
+        } else {
+          dynamicColumnsBuilder_.addMessage(index, builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .PColumn dynamicColumns = 1;</code>
+       */
+      public Builder addAllDynamicColumns(
+          java.lang.Iterable<? extends org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn> values) {
+        if (dynamicColumnsBuilder_ == null) {
+          ensureDynamicColumnsIsMutable();
+          super.addAll(values, dynamicColumns_);
+          onChanged();
+        } else {
+          dynamicColumnsBuilder_.addAllMessages(values);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .PColumn dynamicColumns = 1;</code>
+       */
+      public Builder clearDynamicColumns() {
+        if (dynamicColumnsBuilder_ == null) {
+          dynamicColumns_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000001);
+          onChanged();
+        } else {
+          dynamicColumnsBuilder_.clear();
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .PColumn dynamicColumns = 1;</code>
+       */
+      public Builder removeDynamicColumns(int index) {
+        if (dynamicColumnsBuilder_ == null) {
+          ensureDynamicColumnsIsMutable();
+          dynamicColumns_.remove(index);
+          onChanged();
+        } else {
+          dynamicColumnsBuilder_.remove(index);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .PColumn dynamicColumns = 1;</code>
+       */
+      public org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.Builder getDynamicColumnsBuilder(
+          int index) {
+        return getDynamicColumnsFieldBuilder().getBuilder(index);
+      }
+      /**
+       * <code>repeated .PColumn dynamicColumns = 1;</code>
+       */
+      public org.apache.phoenix.coprocessor.generated.PTableProtos.PColumnOrBuilder getDynamicColumnsOrBuilder(
+          int index) {
+        if (dynamicColumnsBuilder_ == null) {
+          return dynamicColumns_.get(index);  } else {
+          return dynamicColumnsBuilder_.getMessageOrBuilder(index);
+        }
+      }
+      /**
+       * <code>repeated .PColumn dynamicColumns = 1;</code>
+       */
+      public java.util.List<? extends org.apache.phoenix.coprocessor.generated.PTableProtos.PColumnOrBuilder> 
+           getDynamicColumnsOrBuilderList() {
+        if (dynamicColumnsBuilder_ != null) {
+          return dynamicColumnsBuilder_.getMessageOrBuilderList();
+        } else {
+          return java.util.Collections.unmodifiableList(dynamicColumns_);
+        }
+      }
+      /**
+       * <code>repeated .PColumn dynamicColumns = 1;</code>
+       */
+      public org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.Builder addDynamicColumnsBuilder() {
+        return getDynamicColumnsFieldBuilder().addBuilder(
+            org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.getDefaultInstance());
+      }
+      /**
+       * <code>repeated .PColumn dynamicColumns = 1;</code>
+       */
+      public org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.Builder addDynamicColumnsBuilder(
+          int index) {
+        return getDynamicColumnsFieldBuilder().addBuilder(
+            index, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.getDefaultInstance());
+      }
+      /**
+       * <code>repeated .PColumn dynamicColumns = 1;</code>
+       */
+      public java.util.List<org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.Builder> 
+           getDynamicColumnsBuilderList() {
+        return getDynamicColumnsFieldBuilder().getBuilderList();
+      }
+      private com.google.protobuf.RepeatedFieldBuilder<
+          org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.Builder, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumnOrBuilder> 
+          getDynamicColumnsFieldBuilder() {
+        if (dynamicColumnsBuilder_ == null) {
+          dynamicColumnsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+              org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumn.Builder, org.apache.phoenix.coprocessor.generated.PTableProtos.PColumnOrBuilder>(
+                  dynamicColumns_,
+                  ((bitField0_ & 0x00000001) == 0x00000001),
+                  getParentForChildren(),
+                  isClean());
+          dynamicColumns_ = null;
+        }
+        return dynamicColumnsBuilder_;
+      }
+
+      // @@protoc_insertion_point(builder_scope:DynamicColumnMetaData)
+    }
+
+    static {
+      defaultInstance = new DynamicColumnMetaData(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:DynamicColumnMetaData)
+  }
+
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_DynamicColumnMetaData_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_DynamicColumnMetaData_fieldAccessorTable;
+
+  public static com.google.protobuf.Descriptors.FileDescriptor
+      getDescriptor() {
+    return descriptor;
+  }
+  private static com.google.protobuf.Descriptors.FileDescriptor
+      descriptor;
+  static {
+    java.lang.String[] descriptorData = {
+      "\n\033DynamicColumnMetaData.proto\032\014PTable.pr" +
+      "oto\"9\n\025DynamicColumnMetaData\022 \n\016dynamicC" +
+      "olumns\030\001 \003(\0132\010.PColumnBO\n(org.apache.pho" +
+      "enix.coprocessor.generatedB\033DynamicColum" +
+      "nMetaDataProtosH\001\210\001\001\240\001\001"
+    };
+    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+        public com.google.protobuf.ExtensionRegistry assignDescriptors(
+            com.google.protobuf.Descriptors.FileDescriptor root) {
+          descriptor = root;
+          internal_static_DynamicColumnMetaData_descriptor =
+            getDescriptor().getMessageTypes().get(0);
+          internal_static_DynamicColumnMetaData_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_DynamicColumnMetaData_descriptor,
+              new java.lang.String[] { "DynamicColumns", });
+          return null;
+        }
+      };
+    com.google.protobuf.Descriptors.FileDescriptor
+      .internalBuildGeneratedFileFrom(descriptorData,
+        new com.google.protobuf.Descriptors.FileDescriptor[] {
+          org.apache.phoenix.coprocessor.generated.PTableProtos.getDescriptor(),
+        }, assigner);
+  }
+
+  // @@protoc_insertion_point(outer_class_scope)
+}
index 55dbfae..7674e4c 100644 (file)
@@ -214,7 +214,7 @@ public abstract class BaseQueryPlan implements QueryPlan {
 
     @Override
     public final ResultIterator iterator(ParallelScanGrouper scanGrouper, Scan scan) throws SQLException {
-        return iterator(Collections.<ImmutableBytesPtr,ServerCache>emptyMap(), scanGrouper, scan);
+        return iterator(Collections.emptyMap(), scanGrouper, scan);
     }
         
        private ResultIterator getWrappedIterator(final Map<ImmutableBytesPtr,ServerCache> dependencies,
index e3e0264..b5491aa 100644 (file)
@@ -162,8 +162,8 @@ public class CorrelatePlan extends DelegateQueryPlan {
                 try {
                     joined = rhsBitSet == ValueBitSet.EMPTY_VALUE_BITSET ?
                             current : TupleProjector.mergeProjectedValue(
-                                    convertLhs(current), joinedSchema, destBitSet,
-                                    rhsCurrent, rhsSchema, rhsBitSet, rhsFieldPosition, true);
+                                    convertLhs(current), destBitSet,
+                                    rhsCurrent, rhsBitSet, rhsFieldPosition, true);
                 } catch (IOException e) {
                     throw new SQLException(e);
                 }
index b39f64d..93ee43d 100644 (file)
@@ -22,6 +22,8 @@ import static org.apache.phoenix.monitoring.GlobalClientMetrics.GLOBAL_MUTATION_
 import static org.apache.phoenix.monitoring.GlobalClientMetrics.GLOBAL_MUTATION_BATCH_SIZE;
 import static org.apache.phoenix.monitoring.GlobalClientMetrics.GLOBAL_MUTATION_BYTES;
 import static org.apache.phoenix.monitoring.GlobalClientMetrics.GLOBAL_MUTATION_COMMIT_TIME;
+import static org.apache.phoenix.query.QueryServices.WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB;
+import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB;
 
 import java.io.IOException;
 import java.sql.SQLException;
@@ -507,7 +509,7 @@ public class MutationState implements SQLCloseable {
         final Iterator<PTable> indexes = indexList.iterator();
         final List<Mutation> mutationList = Lists.newArrayListWithExpectedSize(values.size());
         final List<Mutation> mutationsPertainingToIndex = indexes.hasNext() ? Lists
-                .<Mutation> newArrayListWithExpectedSize(values.size()) : null;
+                .newArrayListWithExpectedSize(values.size()) : null;
         generateMutations(tableRef, mutationTimestamp, serverTimestamp, values, mutationList,
                 mutationsPertainingToIndex);
         return new Iterator<Pair<PName, List<Mutation>>>() {
@@ -523,7 +525,7 @@ public class MutationState implements SQLCloseable {
             public Pair<PName, List<Mutation>> next() {
                 if (isFirst) {
                     isFirst = false;
-                    return new Pair<PName, List<Mutation>>(table.getPhysicalName(), mutationList);
+                    return new Pair<>(table.getPhysicalName(), mutationList);
                 }
 
                 PTable index = indexes.next();
@@ -595,6 +597,8 @@ public class MutationState implements SQLCloseable {
         Iterator<Map.Entry<ImmutableBytesPtr, RowMutationState>> iterator = values.entrySet().iterator();
         long timestampToUse = mutationTimestamp;
         MultiRowMutationState modifiedValues = new MultiRowMutationState(16);
+        boolean wildcardIncludesDynamicCols = connection.getQueryServices().getProps().getBoolean(
+                WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB, DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB);
         while (iterator.hasNext()) {
             Map.Entry<ImmutableBytesPtr, RowMutationState> rowEntry = iterator.next();
             byte[] onDupKeyBytes = rowEntry.getValue().getOnDupKeyBytes();
@@ -628,6 +632,9 @@ public class MutationState implements SQLCloseable {
                 for (Map.Entry<PColumn, byte[]> valueEntry : rowEntry.getValue().getColumnValues().entrySet()) {
                     row.setValue(valueEntry.getKey(), valueEntry.getValue());
                 }
+                if (wildcardIncludesDynamicCols && row.setAttributesForDynamicColumnsIfReqd()) {
+                    row.setAttributeToProcessDynamicColumnsMetadata();
+                }
                 rowMutations = row.toRowMutations();
                 // Pass through ON DUPLICATE KEY info through mutations
                 // In the case of the same clause being used on many statements, this will be
index d3fd3bc..e7966d9 100644 (file)
@@ -461,9 +461,8 @@ public class SortMergeJoinPlan implements QueryPlan {
                     
                 }
                 return rhsBitSet == ValueBitSet.EMPTY_VALUE_BITSET ?
-                        t : TupleProjector.mergeProjectedValue(
-                                t, joinedSchema, destBitSet,
-                                rhs, rhsSchema, rhsBitSet, rhsFieldPosition, true);
+                        t : TupleProjector.mergeProjectedValue(t, destBitSet,
+                                rhs, rhsBitSet, rhsFieldPosition, true);
             } catch (IOException e) {
                 throw new SQLException(e);
             }
index 753c11d..eeeea02 100644 (file)
  */
 package org.apache.phoenix.execute;
 
+import static org.apache.phoenix.coprocessor.ScanRegionObserver.WILDCARD_SCAN_INCLUDES_DYNAMIC_COLUMNS;
+import static org.apache.phoenix.coprocessor.ScanRegionObserver.DYN_COLS_METADATA_CELL_QUALIFIER;
 import static org.apache.phoenix.query.QueryConstants.VALUE_COLUMN_FAMILY;
 import static org.apache.phoenix.query.QueryConstants.VALUE_COLUMN_QUALIFIER;
+import static org.apache.phoenix.schema.types.PDataType.TRUE_BYTES;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.sql.SQLException;
 import java.util.Arrays;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 
+import com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.phoenix.compile.ColumnProjector;
 import org.apache.phoenix.compile.RowProjector;
+import org.apache.phoenix.coprocessor.generated.PTableProtos;
 import org.apache.phoenix.expression.Expression;
 import org.apache.phoenix.expression.ExpressionType;
+import org.apache.phoenix.expression.KeyValueColumnExpression;
 import org.apache.phoenix.schema.KeyValueSchema;
 import org.apache.phoenix.schema.KeyValueSchema.KeyValueSchemaBuilder;
 import org.apache.phoenix.schema.PColumn;
+import org.apache.phoenix.schema.PColumnImpl;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.ProjectedColumn;
 import org.apache.phoenix.schema.ValueBitSet;
 import org.apache.phoenix.schema.tuple.BaseTuple;
 import org.apache.phoenix.schema.tuple.Tuple;
+import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.PhoenixKeyValueUtil;
+import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.SchemaUtil;
 
 import com.google.common.base.Preconditions;
@@ -112,7 +126,20 @@ public class TupleProjector {
         this.valueSet = bitSet;
     }
     
-    public static void serializeProjectorIntoScan(Scan scan, TupleProjector projector) {
+    public static void serializeProjectorIntoScan(Scan scan, TupleProjector projector,
+            boolean projectDynColsInWildcardQueries) {
+        scan.setAttribute(SCAN_PROJECTOR, serializeProjectorIntoBytes(projector));
+        if (projectDynColsInWildcardQueries) {
+            scan.setAttribute(WILDCARD_SCAN_INCLUDES_DYNAMIC_COLUMNS, TRUE_BYTES);
+        }
+    }
+
+    /**
+     * Serialize the projector into a byte array
+     * @param projector projector to serialize
+     * @return byte array
+     */
+    private static byte[] serializeProjectorIntoBytes(TupleProjector projector) {
         ByteArrayOutputStream stream = new ByteArrayOutputStream();
         try {
             DataOutputStream output = new DataOutputStream(stream);
@@ -120,10 +147,11 @@ public class TupleProjector {
             int count = projector.expressions.length;
             WritableUtils.writeVInt(output, count);
             for (int i = 0; i < count; i++) {
-               WritableUtils.writeVInt(output, ExpressionType.valueOf(projector.expressions[i]).ordinal());
-               projector.expressions[i].write(output);
+                WritableUtils.writeVInt(output,
+                        ExpressionType.valueOf(projector.expressions[i]).ordinal());
+                projector.expressions[i].write(output);
             }
-            scan.setAttribute(SCAN_PROJECTOR, stream.toByteArray());
+            return stream.toByteArray();
         } catch (IOException e) {
             throw new RuntimeException(e);
         } finally {
@@ -133,11 +161,18 @@ public class TupleProjector {
                 throw new RuntimeException(e);
             }
         }
-        
     }
     
     public static TupleProjector deserializeProjectorFromScan(Scan scan) {
-        byte[] proj = scan.getAttribute(SCAN_PROJECTOR);
+        return deserializeProjectorFromBytes(scan.getAttribute(SCAN_PROJECTOR));
+    }
+
+    /**
+     * Deserialize the byte array to form a projector
+     * @param proj byte array to deserialize
+     * @return projector
+     */
+    private static TupleProjector deserializeProjectorFromBytes(byte[] proj) {
         if (proj == null) {
             return null;
         }
@@ -149,9 +184,9 @@ public class TupleProjector {
             int count = WritableUtils.readVInt(input);
             Expression[] expressions = new Expression[count];
             for (int i = 0; i < count; i++) {
-               int ordinal = WritableUtils.readVInt(input);
-               expressions[i] = ExpressionType.values()[ordinal].newInstance();
-               expressions[i].readFields(input);
+                int ordinal = WritableUtils.readVInt(input);
+                expressions[i] = ExpressionType.values()[ordinal].newInstance();
+                expressions[i].readFields(input);
             }
             return new TupleProjector(schema, expressions);
         } catch (IOException e) {
@@ -164,6 +199,86 @@ public class TupleProjector {
             }
         }
     }
+
+    /**
+     * Iterate over the list of cells returned from the scan and return a tuple projector for the
+     * dynamic columns by parsing the metadata stored for the list of dynamic columns
+     * @param result list of cells
+     * @param dynCols list of dynamic columns to be populated
+     * @param dynColCells list of cells corresponding to dynamic columns to be populated
+     * @return The tuple projector corresponding to dynamic columns or null if there are no dynamic
+     * columns to process
+     * @throws InvalidProtocolBufferException Thrown if there is an error parsing byte[] to protobuf
+     */
+    public static TupleProjector getDynamicColumnsTupleProjector(List<Cell> result,
+            List<PColumn> dynCols, List<Cell> dynColCells) throws InvalidProtocolBufferException {
+        Set<Pair<ByteBuffer, ByteBuffer>> dynColCellQualifiers = new HashSet<>();
+        populateDynColsFromResult(result, dynCols, dynColCellQualifiers);
+        if (dynCols.isEmpty()) {
+            return null;
+        }
+        populateDynamicColumnCells(result, dynColCellQualifiers, dynColCells);
+        if (dynColCells.isEmpty()) {
+            return null;
+        }
+        KeyValueSchema dynColsSchema = PhoenixRuntime.buildKeyValueSchema(dynCols);
+        Expression[] expressions = new Expression[dynCols.size()];
+        for (int i = 0; i < dynCols.size(); i++) {
+            expressions[i] = new KeyValueColumnExpression(dynCols.get(i));
+        }
+        return new TupleProjector(dynColsSchema, expressions);
+    }
+
+    /**
+     * Populate cells corresponding to dynamic columns
+     * @param result list of cells
+     * @param dynColCellQualifiers Set of <column family, column qualifier> pairs corresponding to
+     *                             cells of dynamic columns
+     * @param dynColCells Populated list of cells corresponding to dynamic columns
+     */
+    private static void populateDynamicColumnCells(List<Cell> result,
+            Set<Pair<ByteBuffer, ByteBuffer>> dynColCellQualifiers, List<Cell> dynColCells) {
+        for (Cell c : result) {
+            Pair famQualPair = new Pair<>(ByteBuffer.wrap(CellUtil.cloneFamily(c)),
+                    ByteBuffer.wrap(CellUtil.cloneQualifier(c)));
+            if (dynColCellQualifiers.contains(famQualPair)) {
+                dynColCells.add(c);
+            }
+        }
+    }
+
+    /**
+     * Iterate over the list of cells and populate dynamic columns
+     * @param result list of cells
+     * @param dynCols Populated list of PColumns corresponding to dynamic columns
+     * @param dynColCellQualifiers Populated set of <column family, column qualifier> pairs
+     *                             for the cells in the list, which correspond to dynamic columns
+     * @throws InvalidProtocolBufferException Thrown if there is an error parsing byte[] to protobuf
+     */
+    private static void populateDynColsFromResult(List<Cell> result, List<PColumn> dynCols,
+            Set<Pair<ByteBuffer, ByteBuffer>> dynColCellQualifiers)
+    throws InvalidProtocolBufferException {
+        for (Cell c : result) {
+            byte[] qual = CellUtil.cloneQualifier(c);
+            byte[] fam = CellUtil.cloneFamily(c);
+            int index = Bytes.indexOf(qual, DYN_COLS_METADATA_CELL_QUALIFIER);
+
+            // Contains dynamic column metadata, so add it to the list of dynamic columns
+            if (index != -1) {
+                byte[] dynColMetaDataProto = CellUtil.cloneValue(c);
+                dynCols.add(PColumnImpl.createFromProto(
+                        PTableProtos.PColumn.parseFrom(dynColMetaDataProto)));
+                // Add the <fam, qualifier> pair for the actual dynamic column. The column qualifier
+                // of the dynamic column is got by parsing out the known bytes from the shadow cell
+                // containing the metadata for that column i.e.
+                // DYN_COLS_METADATA_CELL_QUALIFIER<actual column qualifier>
+                byte[] dynColQual = Arrays.copyOfRange(qual,
+                        index + DYN_COLS_METADATA_CELL_QUALIFIER.length, qual.length);
+                dynColCellQualifiers.add(
+                        new Pair<>(ByteBuffer.wrap(fam), ByteBuffer.wrap(dynColQual)));
+            }
+        }
+    }
     
     public static class ProjectedValueTuple extends BaseTuple {
         ImmutableBytesWritable keyPtr = new ImmutableBytesWritable();
@@ -208,6 +323,28 @@ public class TupleProjector {
         }
 
         @Override
+        public Cell mergeWithDynColsListBytesAndGetValue(int index, byte[] dynColsList) {
+            if (index != 0) {
+                throw new IndexOutOfBoundsException(Integer.toString(index));
+            }
+            if (dynColsList == null || dynColsList.length == 0) {
+                return getValue(VALUE_COLUMN_FAMILY, VALUE_COLUMN_QUALIFIER);
+            }
+            // We put the known reserved bytes before the serialized list of dynamic column
+            // PColumns to easily parse out the column list on the client
+            byte[] concatBytes = ByteUtil.concat(projectedValue.get(),
+                    DYN_COLS_METADATA_CELL_QUALIFIER, dynColsList);
+            ImmutableBytesWritable projectedValueWithDynColsListBytes =
+                    new ImmutableBytesWritable(concatBytes);
+            keyValue = PhoenixKeyValueUtil.newKeyValue(keyPtr.get(), keyPtr.getOffset(),
+                    keyPtr.getLength(), VALUE_COLUMN_FAMILY, VALUE_COLUMN_QUALIFIER, timestamp,
+                    projectedValueWithDynColsListBytes.get(),
+                    projectedValueWithDynColsListBytes.getOffset(),
+                    projectedValueWithDynColsListBytes.getLength());
+            return keyValue;
+        }
+
+        @Override
         public Cell getValue(int index) {
             if (index != 0) {
                 throw new IndexOutOfBoundsException(Integer.toString(index));
@@ -305,8 +442,9 @@ public class TupleProjector {
         if (!b) throw new IOException("Trying to decode a non-projected value.");
     }
     
-    public static ProjectedValueTuple mergeProjectedValue(ProjectedValueTuple dest, KeyValueSchema destSchema, ValueBitSet destBitSet,
-               Tuple src, KeyValueSchema srcSchema, ValueBitSet srcBitSet, int offset, boolean useNewValueColumnQualifier) throws IOException {
+    public static ProjectedValueTuple mergeProjectedValue(ProjectedValueTuple dest,
+            ValueBitSet destBitSet, Tuple src, ValueBitSet srcBitSet, int offset,
+            boolean useNewValueColumnQualifier) throws IOException {
        ImmutableBytesWritable destValue = dest.getProjectedValue();
         int origDestBitSetLen = dest.getBitSetLength();
        destBitSet.clear();
index e60bf00..52db52c 100644 (file)
@@ -80,6 +80,10 @@ public class ProjectedColumnExpression extends ColumnExpression {
     public int getPosition() {
        return position;
     }
+
    /**
     * Returns the columns backing this projected expression.
     * NOTE(review): this exposes the internal collection directly; callers must treat it
     * as read-only — confirm no caller mutates it.
     */
    public Collection<PColumn> getColumns() {
           return columns;
    }
     
     @Override
     public String toString() {
index c6967cb..7957774 100644 (file)
@@ -42,11 +42,11 @@ public abstract class KeyValueBuilder {
      * @throws RuntimeException if there is an IOException thrown from the underlying {@link Put}
      */
     @SuppressWarnings("javadoc")
-    public static void addQuietly(Mutation m, KeyValueBuilder builder, KeyValue kv) {
+    public static void addQuietly(Mutation m, KeyValue kv) {
         byte [] family = CellUtil.cloneFamily(kv);
         List<Cell> list = m.getFamilyCellMap().get(family);
         if (list == null) {
-            list = new ArrayList<Cell>();
+            list = new ArrayList<>();
             m.getFamilyCellMap().put(family, list);
         }
         list.add(kv);
index a30a86b..7fbb636 100644 (file)
@@ -23,6 +23,8 @@ import static org.apache.phoenix.coprocessor.BaseScannerRegionObserver.SCAN_STAR
 import static org.apache.phoenix.coprocessor.BaseScannerRegionObserver.SCAN_STOP_ROW_SUFFIX;
 import static org.apache.phoenix.monitoring.GlobalClientMetrics.GLOBAL_FAILED_QUERY_COUNTER;
 import static org.apache.phoenix.monitoring.GlobalClientMetrics.GLOBAL_QUERY_TIMEOUT_COUNTER;
+import static org.apache.phoenix.query.QueryServices.WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB;
+import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB;
 import static org.apache.phoenix.schema.PTable.IndexType.LOCAL;
 import static org.apache.phoenix.schema.PTableType.INDEX;
 import static org.apache.phoenix.util.ByteUtil.EMPTY_BYTE_ARRAY;
@@ -190,6 +192,9 @@ public abstract class BaseResultIterators extends ExplainTable implements Result
     private static void initializeScan(QueryPlan plan, Integer perScanLimit, Integer offset, Scan scan) throws SQLException {
         StatementContext context = plan.getContext();
         TableRef tableRef = plan.getTableRef();
+        boolean wildcardIncludesDynamicCols = context.getConnection().getQueryServices()
+                .getConfiguration().getBoolean(WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB,
+                        DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB);
         PTable table = tableRef.getTable();
 
         Map<byte [], NavigableSet<byte []>> familyMap = scan.getFamilyMap();
@@ -208,7 +213,8 @@ public abstract class BaseResultIterators extends ExplainTable implements Result
             FilterableStatement statement = plan.getStatement();
             RowProjector projector = plan.getProjector();
             boolean optimizeProjection = false;
-            boolean keyOnlyFilter = familyMap.isEmpty() && context.getWhereConditionColumns().isEmpty();
+            boolean keyOnlyFilter = familyMap.isEmpty() && !wildcardIncludesDynamicCols &&
+                    context.getWhereConditionColumns().isEmpty();
             if (!projector.projectEverything()) {
                 // If nothing projected into scan and we only have one column family, just allow everything
                 // to be projected and use a FirstKeyOnlyFilter to skip from row to row. This turns out to
index 703c9dc..5ad72ed 100644 (file)
 
 package org.apache.phoenix.iterate;
 
+import static org.apache.phoenix.coprocessor.ScanRegionObserver.WILDCARD_SCAN_INCLUDES_DYNAMIC_COLUMNS;
+import static org.apache.phoenix.schema.types.PDataType.TRUE_BYTES;
+
 import com.google.common.collect.ImmutableList;
 
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
@@ -31,6 +36,8 @@ import org.apache.hadoop.hbase.regionserver.RegionScanner;
 import org.apache.hadoop.hbase.regionserver.ScannerContext;
 import org.apache.hadoop.hbase.regionserver.ScannerContextUtil;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Pair;
+import org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos;
 import org.apache.phoenix.execute.TupleProjector;
 import org.apache.phoenix.expression.Expression;
 import org.apache.phoenix.expression.KeyValueColumnExpression;
@@ -38,17 +45,21 @@ import org.apache.phoenix.hbase.index.covered.update.ColumnReference;
 import org.apache.phoenix.index.IndexMaintainer;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.schema.KeyValueSchema;
-import org.apache.phoenix.schema.PTable;
+import org.apache.phoenix.schema.PColumn;
+import org.apache.phoenix.schema.PColumnImpl;
 import org.apache.phoenix.schema.ValueBitSet;
-import org.apache.phoenix.schema.tuple.*;
+import org.apache.phoenix.schema.tuple.MultiKeyValueTuple;
+import org.apache.phoenix.schema.tuple.PositionBasedResultTuple;
+import org.apache.phoenix.schema.tuple.ResultTuple;
+import org.apache.phoenix.schema.tuple.Tuple;
 import org.apache.phoenix.transaction.PhoenixTransactionContext;
 import org.apache.phoenix.util.EncodedColumnsUtil;
 import org.apache.phoenix.util.IndexUtil;
 import org.apache.phoenix.util.ScanUtil;
 import org.apache.phoenix.util.ServerUtil;
-import org.apache.tephra.Transaction;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.ListIterator;
 import java.util.Set;
@@ -183,11 +194,17 @@ public abstract class RegionScannerFactory {
                 tupleProjector, dataRegion, indexMaintainer, viewConstants, ptr);
           }
           if (projector != null) {
-            Tuple toProject = useQualifierAsListIndex ? new PositionBasedResultTuple(result) : new ResultTuple(
-                Result.create(result));
-            Tuple tuple = projector.projectResults(toProject, useNewValueColumnQualifier);
+            Tuple toProject = useQualifierAsListIndex ? new PositionBasedResultTuple(result) :
+                    new ResultTuple(Result.create(result));
+
+            Pair<Tuple, byte[]> mergedTupleDynColsPair = getTupleWithDynColsIfRequired(result,
+                    projector.projectResults(toProject, useNewValueColumnQualifier));
+            Tuple tupleWithDynColsIfReqd = mergedTupleDynColsPair.getFirst();
+            byte[] serializedDynColsList = mergedTupleDynColsPair.getSecond();
+
             result.clear();
-            result.add(tuple.getValue(0));
+            result.add(tupleWithDynColsIfReqd.mergeWithDynColsListBytesAndGetValue(0,
+                    serializedDynColsList));
             if (arrayElementCell != null) {
               result.add(arrayElementCell);
             }
@@ -200,6 +217,59 @@ public abstract class RegionScannerFactory {
         }
       }
 
+      /**
+       * Iterate over the list of cells returned from the scan and use the dynamic column metadata
+       * to create a tuple projector for dynamic columns. Finally, merge this with the projected
+       * values corresponding to the known columns
+       * @param result list of cells returned from the scan
+       * @param tuple projected value tuple from known schema/columns
+       * @return A pair, whose first part is a combined projected value tuple containing the
+       * known column values along with resolved dynamic column values and whose second part is
+       * the serialized list of dynamic column PColumns. In case dynamic columns are not
+       * to be exposed or are not present, this returns the original tuple and an empty byte array.
+       * @throws IOException Thrown if there is an error parsing protobuf or merging projected
+       * values
+       */
+      private Pair<Tuple, byte[]> getTupleWithDynColsIfRequired(List<Cell> result, Tuple tuple)
+        throws IOException {
+        // We only care about dynamic column cells if the scan has this attribute set
+        if (Bytes.equals(scan.getAttribute(WILDCARD_SCAN_INCLUDES_DYNAMIC_COLUMNS), TRUE_BYTES)) {
+          List<PColumn> dynCols = new ArrayList<>();
+          List<Cell> dynColCells = new ArrayList<>();
+          TupleProjector dynColTupleProj = TupleProjector.getDynamicColumnsTupleProjector(result,
+              dynCols, dynColCells);
+          if (dynColTupleProj != null) {
+            Tuple toProject = useQualifierAsListIndex ? new PositionBasedResultTuple(dynColCells) :
+                new ResultTuple(Result.create(dynColCells));
+            Tuple dynColsProjectedTuple = dynColTupleProj
+                .projectResults(toProject, useNewValueColumnQualifier);
+
+            ValueBitSet destBitSet = projector.getValueBitSet();
+            // In case we are not projecting any non-row key columns, the field count for the
+            // current projector will be 0, so we simply use the dynamic column projector's
+            // value bitset as the destination bitset.
+            if (projector.getSchema().getFieldCount() == 0) {
+              destBitSet = dynColTupleProj.getValueBitSet();
+            }
+            // Add dynamic column data at the end of the projected tuple
+            Tuple mergedTuple = TupleProjector.mergeProjectedValue(
+                (TupleProjector.ProjectedValueTuple)tuple, destBitSet, dynColsProjectedTuple,
+                dynColTupleProj.getValueBitSet(), projector.getSchema().getFieldCount(),
+                useNewValueColumnQualifier);
+
+            // We send the serialized list of PColumns for dynamic columns back to the client
+            // so that the client can process the corresponding projected values
+            DynamicColumnMetaDataProtos.DynamicColumnMetaData.Builder dynColsListBuilder =
+                DynamicColumnMetaDataProtos.DynamicColumnMetaData.newBuilder();
+            for (PColumn dynCol : dynCols) {
+              dynColsListBuilder.addDynamicColumns(PColumnImpl.toProto(dynCol));
+            }
+            return new Pair<>(mergedTuple, dynColsListBuilder.build().toByteArray());
+          }
+        }
+        return new Pair<>(tuple, new byte[0]);
+      }
+
       @Override
       public boolean nextRaw(List<Cell> result, ScannerContext scannerContext)
           throws IOException {
index 84816a0..b99ece6 100644 (file)
  */
 package org.apache.phoenix.jdbc;
 
+import static org.apache.phoenix.coprocessor.ScanRegionObserver.DYN_COLS_METADATA_CELL_QUALIFIER;
+import static org.apache.phoenix.query.QueryServices.WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB;
+import static org.apache.phoenix.query.QueryServicesOptions.DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB;
+
 import java.io.InputStream;
 import java.io.Reader;
 import java.math.BigDecimal;
@@ -38,17 +42,32 @@ import java.sql.SQLXML;
 import java.sql.Time;
 import java.sql.Timestamp;
 import java.text.Format;
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Calendar;
+import java.util.List;
 import java.util.Map;
 
+import com.google.common.primitives.Bytes;
+import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.util.Pair;
 import org.apache.phoenix.compile.ColumnProjector;
+import org.apache.phoenix.compile.ExpressionProjector;
 import org.apache.phoenix.compile.RowProjector;
 import org.apache.phoenix.compile.StatementContext;
+import org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos;
+import org.apache.phoenix.coprocessor.generated.PTableProtos;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.exception.SQLExceptionInfo;
+import org.apache.phoenix.execute.TupleProjector;
+import org.apache.phoenix.expression.Expression;
+import org.apache.phoenix.expression.ProjectedColumnExpression;
 import org.apache.phoenix.iterate.ResultIterator;
 import org.apache.phoenix.log.QueryLogInfo;
 import org.apache.phoenix.log.QueryLogger;
@@ -56,6 +75,8 @@ import org.apache.phoenix.log.QueryStatus;
 import org.apache.phoenix.monitoring.MetricType;
 import org.apache.phoenix.monitoring.OverAllQueryMetrics;
 import org.apache.phoenix.monitoring.ReadMetricQueue;
+import org.apache.phoenix.schema.PColumn;
+import org.apache.phoenix.schema.PColumnImpl;
 import org.apache.phoenix.schema.tuple.ResultTuple;
 import org.apache.phoenix.schema.tuple.Tuple;
 import org.apache.phoenix.schema.types.PBoolean;
@@ -76,8 +97,7 @@ import org.apache.phoenix.util.SQLCloseable;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Throwables;
-
-
+import org.apache.phoenix.util.SchemaUtil;
 
 /**
  *
@@ -121,7 +141,11 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
     private final ReadMetricQueue readMetricsQueue;
     private final OverAllQueryMetrics overAllQueryMetrics;
     private final ImmutableBytesWritable ptr = new ImmutableBytesWritable();
+    private final boolean wildcardIncludesDynamicCols;
+    private final List<PColumn> staticColumns;
+    private final int startPositionForDynamicCols;
 
+    private RowProjector rowProjectorWithDynamicCols;
     private Tuple currentRow = BEFORE_FIRST;
     private boolean isClosed = false;
     private boolean wasNull = false;
@@ -133,9 +157,8 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
 
     private Object exception;
 
-
-    
-    public PhoenixResultSet(ResultIterator resultIterator, RowProjector rowProjector, StatementContext ctx) throws SQLException {
+    public PhoenixResultSet(ResultIterator resultIterator, RowProjector rowProjector,
+            StatementContext ctx) throws SQLException {
         this.rowProjector = rowProjector;
         this.scanner = resultIterator;
         this.context = ctx;
@@ -143,6 +166,17 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
         this.readMetricsQueue = context.getReadMetricsQueue();
         this.overAllQueryMetrics = context.getOverallQueryMetrics();
         this.queryLogger = context.getQueryLogger() != null ? context.getQueryLogger() : QueryLogger.NO_OP_INSTANCE;
+        this.wildcardIncludesDynamicCols = this.context.getConnection().getQueryServices()
+                .getConfiguration().getBoolean(WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB,
+                        DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB);
+        if (this.wildcardIncludesDynamicCols) {
+            Pair<List<PColumn>, Integer> res = getStaticColsAndStartingPosForDynCols();
+            this.staticColumns = res.getFirst();
+            this.startPositionForDynamicCols = res.getSecond();
+        } else {
+            this.staticColumns = null;
+            this.startPositionForDynamicCols = 0;
+        }
     }
     
     @Override
@@ -202,7 +236,7 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
 
    /**
     * Maps a column label to its 1-based column index. Consults getRowProjector(), which
     * presumably accounts for per-row dynamic columns when they are enabled — the
     * projector selection logic lives elsewhere in this class.
     * @param columnLabel the column label to look up
     * @return the 1-based index of the column
     */
    @Override
    public int findColumn(String columnLabel) throws SQLException {
        Integer index = getRowProjector().getColumnIndex(columnLabel);
        return index + 1;
    }
 
@@ -216,7 +250,7 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
        checkCursorState();
         // Get the value using the expected type instead of trying to coerce to VARCHAR.
         // We can't coerce using our formatter because we don't have enough context in PDataType.
-       ColumnProjector projector = rowProjector.getColumnProjector(columnIndex-1);
+        ColumnProjector projector = getRowProjector().getColumnProjector(columnIndex-1);
         Array value = (Array)projector.getValue(currentRow, projector.getExpression().getDataType(), ptr);
         wasNull = (value == null);
         return value;
@@ -255,8 +289,8 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
     @Override
     public BigDecimal getBigDecimal(int columnIndex) throws SQLException {
         checkCursorState();
-        BigDecimal value = (BigDecimal)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
-            PDecimal.INSTANCE, ptr);
+        BigDecimal value = (BigDecimal)getRowProjector().getColumnProjector(columnIndex-1)
+                .getValue(currentRow, PDecimal.INSTANCE, ptr);
         wasNull = (value == null);
         return value;
     }
@@ -303,7 +337,7 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
     @Override
     public boolean getBoolean(int columnIndex) throws SQLException {
         checkCursorState();
-        ColumnProjector colProjector = rowProjector.getColumnProjector(columnIndex-1);
+        ColumnProjector colProjector = getRowProjector().getColumnProjector(columnIndex-1);
         PDataType type = colProjector.getExpression().getDataType();
         Object value = colProjector.getValue(currentRow, type, ptr);
         wasNull = (value == null);
@@ -332,8 +366,8 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
     @Override
     public byte[] getBytes(int columnIndex) throws SQLException {
         checkCursorState();
-        byte[] value = (byte[])rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
-            PVarbinary.INSTANCE, ptr);
+        byte[] value = (byte[])getRowProjector().getColumnProjector(columnIndex-1)
+                .getValue(currentRow, PVarbinary.INSTANCE, ptr);
         wasNull = (value == null);
         return value;
     }
@@ -347,7 +381,7 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
     public byte getByte(int columnIndex) throws SQLException {
 //        throw new SQLFeatureNotSupportedException();
         checkCursorState();
-        Byte value = (Byte)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
+        Byte value = (Byte)getRowProjector().getColumnProjector(columnIndex-1).getValue(currentRow,
             PTinyint.INSTANCE, ptr);
         wasNull = (value == null);
         if (value == null) {
@@ -394,7 +428,7 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
     @Override
     public Date getDate(int columnIndex) throws SQLException {
         checkCursorState();
-        Date value = (Date)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
+        Date value = (Date)getRowProjector().getColumnProjector(columnIndex-1).getValue(currentRow,
             PDate.INSTANCE, ptr);
         wasNull = (value == null);
         if (value == null) {
@@ -411,7 +445,7 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
     @Override
     public Date getDate(int columnIndex, Calendar cal) throws SQLException {
         checkCursorState();
-        Date value = (Date)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
+        Date value = (Date)getRowProjector().getColumnProjector(columnIndex-1).getValue(currentRow,
             PDate.INSTANCE, ptr);
         wasNull = (value == null);
         if (wasNull) {
@@ -429,8 +463,8 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
     @Override
     public double getDouble(int columnIndex) throws SQLException {
         checkCursorState();
-        Double value = (Double)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
-            PDouble.INSTANCE, ptr);
+        Double value = (Double)getRowProjector().getColumnProjector(columnIndex-1)
+                .getValue(currentRow, PDouble.INSTANCE, ptr);
         wasNull = (value == null);
         if (value == null) {
             return 0;
@@ -456,8 +490,8 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
     @Override
     public float getFloat(int columnIndex) throws SQLException {
         checkCursorState();
-        Float value = (Float)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
-            PFloat.INSTANCE, ptr);
+        Float value = (Float)getRowProjector().getColumnProjector(columnIndex-1)
+                .getValue(currentRow, PFloat.INSTANCE, ptr);
         wasNull = (value == null);
         if (value == null) {
             return 0;
@@ -478,8 +512,8 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
     @Override
     public int getInt(int columnIndex) throws SQLException {
         checkCursorState();
-        Integer value = (Integer)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
-            PInteger.INSTANCE, ptr);
+        Integer value = (Integer)getRowProjector().getColumnProjector(columnIndex-1)
+                .getValue(currentRow, PInteger.INSTANCE, ptr);
         wasNull = (value == null);
         if (value == null) {
             return 0;
@@ -495,7 +529,7 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
     @Override
     public long getLong(int columnIndex) throws SQLException {
         checkCursorState();
-        Long value = (Long)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
+        Long value = (Long)getRowProjector().getColumnProjector(columnIndex-1).getValue(currentRow,
             PLong.INSTANCE, ptr);
         wasNull = (value == null);
         if (value == null) {
@@ -511,7 +545,7 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
 
    /**
     * Returns metadata describing this result set's columns. Built from the projector
     * returned by getRowProjector(), so it can reflect dynamic columns when that projector
     * includes them — confirm against getRowProjector()'s selection logic.
     */
    @Override
    public ResultSetMetaData getMetaData() throws SQLException {
        return new PhoenixResultSetMetaData(statement.getConnection(), getRowProjector());
    }
 
     @Override
@@ -547,7 +581,7 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
     @Override
     public Object getObject(int columnIndex) throws SQLException {
         checkCursorState();
-        ColumnProjector projector = rowProjector.getColumnProjector(columnIndex-1);
+        ColumnProjector projector = getRowProjector().getColumnProjector(columnIndex-1);
         Object value = projector.getValue(currentRow, projector.getExpression().getDataType(), ptr);
         wasNull = (value == null);
         return value;
@@ -607,7 +641,8 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
     @Override
     public short getShort(int columnIndex) throws SQLException {
         checkCursorState();
-        Short value = (Short)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow, PSmallint.INSTANCE, ptr);
+        Short value = (Short)getRowProjector().getColumnProjector(columnIndex-1)
+                .getValue(currentRow, PSmallint.INSTANCE, ptr);
         wasNull = (value == null);
         if (value == null) {
             return 0;
@@ -630,7 +665,7 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
         checkCursorState();
         // Get the value using the expected type instead of trying to coerce to VARCHAR.
         // We can't coerce using our formatter because we don't have enough context in PDataType.
-        ColumnProjector projector = rowProjector.getColumnProjector(columnIndex-1);
+        ColumnProjector projector = getRowProjector().getColumnProjector(columnIndex-1);
         PDataType type = projector.getExpression().getDataType();
         Object value = projector.getValue(currentRow,type, ptr);
         if (wasNull = (value == null)) {
@@ -651,7 +686,7 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
     @Override
     public Time getTime(int columnIndex) throws SQLException {
         checkCursorState();
-        Time value = (Time)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
+        Time value = (Time)getRowProjector().getColumnProjector(columnIndex-1).getValue(currentRow,
             PTime.INSTANCE, ptr);
         wasNull = (value == null);
         return value;
@@ -665,7 +700,7 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
     @Override
     public Time getTime(int columnIndex, Calendar cal) throws SQLException {
         checkCursorState();
-        Time value = (Time)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
+        Time value = (Time)getRowProjector().getColumnProjector(columnIndex-1).getValue(currentRow,
             PTime.INSTANCE, ptr);
         wasNull = (value == null);
         if (value == null) {
@@ -684,8 +719,8 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
     @Override
     public Timestamp getTimestamp(int columnIndex) throws SQLException {
         checkCursorState();
-        Timestamp value = (Timestamp)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow,
-            PTimestamp.INSTANCE, ptr);
+        Timestamp value = (Timestamp)getRowProjector().getColumnProjector(columnIndex-1)
+                .getValue(currentRow, PTimestamp.INSTANCE, ptr);
         wasNull = (value == null);
         return value;
     }
@@ -713,7 +748,8 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
     @Override
     public URL getURL(int columnIndex) throws SQLException {
         checkCursorState();
-        String value = (String)rowProjector.getColumnProjector(columnIndex-1).getValue(currentRow, PVarchar.INSTANCE, ptr);
+        String value = (String)getRowProjector().getColumnProjector(columnIndex-1)
+                .getValue(currentRow, PVarchar.INSTANCE, ptr);
         wasNull = (value == null);
         if (value == null) {
             return null;
@@ -807,8 +843,16 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
                 close();
             }else{
                 count++;
+                // Reset this projector with each row
+                if (this.rowProjectorWithDynamicCols != null) {
+                    this.rowProjectorWithDynamicCols = null;
+                }
+                processDynamicColumnsIfRequired();
             }
             rowProjector.reset();
+            if (rowProjectorWithDynamicCols != null) {
+                rowProjectorWithDynamicCols.reset();
+            }
         } catch (RuntimeException e) {
             // FIXME: Expression.evaluate does not throw SQLException
             // so this will unwrap throws from that.
@@ -1354,4 +1398,150 @@ public class PhoenixResultSet implements ResultSet, SQLCloseable {
         return context;
     }
 
+    /**
+     * Choose the projector for the current row: prefer the per-row projector that also covers
+     * this row's dynamic columns when one has been materialized, otherwise fall back to the
+     * regular projector containing only static column projectors.
+     * @return the row projector to use for value retrieval on the current row
+     */
+    private RowProjector getRowProjector() {
+        return this.rowProjectorWithDynamicCols != null ?
+                this.rowProjectorWithDynamicCols : this.rowProjector;
+    }
+
+    /**
+     * Populate the static columns and the starting position for dynamic columns which we use when
+     * merging column projectors of static and dynamic columns
+     * @return Pair whose first part is the list of static column PColumns and the second part is
+     * the starting position for dynamic columns (the count of non-PK static columns)
+     */
+    private Pair<List<PColumn>, Integer> getStaticColsAndStartingPosForDynCols(){
+        List<PColumn> staticCols = new ArrayList<>();
+        // Only the first ProjectedColumnExpression is consulted; assumes every such expression
+        // in this projector shares the same backing static column list — TODO confirm
+        for (ColumnProjector cp : this.rowProjector.getColumnProjectors()) {
+            Expression exp = cp.getExpression();
+            if (exp instanceof ProjectedColumnExpression) {
+                staticCols.addAll(((ProjectedColumnExpression) exp).getColumns());
+                break;
+            }
+        }
+        // Dynamic column projectors are appended after all non-PK static columns, so their
+        // starting schema position is the number of non-PK static columns
+        int startingPosForDynCols = 0;
+        for (PColumn col : staticCols) {
+            if (!SchemaUtil.isPKColumn(col)) {
+                startingPosForDynCols++;
+            }
+        }
+        return new Pair<>(staticCols, startingPosForDynCols);
+    }
+
+    /**
+     * If dynamic columns must be surfaced for wildcard queries, deserialize this row's dynamic
+     * column metadata and install a combined projector covering both static and dynamic columns.
+     * No-op when the feature is off, there is no current row, or this query does not project
+     * dynamic columns in wildcard selections.
+     */
+    private void processDynamicColumnsIfRequired() {
+        if (!this.wildcardIncludesDynamicCols || this.currentRow == null ||
+                !this.rowProjector.projectDynColsInWildcardQueries()) {
+            return;
+        }
+        // Side effect: this also strips the serialized metadata off the current row's value
+        List<PColumn> dynCols = getDynColsListAndSeparateFromActualData();
+        // Nothing to merge when there are no dynamic columns or no static projectors to extend
+        if (dynCols == null || dynCols.isEmpty() || this.rowProjector.getColumnCount() == 0) {
+            return;
+        }
+        // mergeRowProjectorWithDynColProjectors always constructs a non-null RowProjector, so
+        // assign the combined projector directly
+        this.rowProjectorWithDynamicCols = mergeRowProjectorWithDynColProjectors(dynCols,
+                this.rowProjector.getColumnProjector(0).getTableName());
+    }
+
+    /**
+     * Separate the actual cell data from the serialized list of dynamic column PColumns and
+     * return the deserialized list of dynamic column PColumns for the current row.
+     * Side effect: replaces {@code this.currentRow} with a tuple containing only the actual
+     * cell data (the trailing metadata marker and serialized column list are stripped off).
+     * @return Deserialized list of dynamic column PColumns or null if there are no dynamic columns
+     */
+    private List<PColumn> getDynColsListAndSeparateFromActualData() {
+        Cell base = this.currentRow.getValue(0);
+        final byte[] valueArray = CellUtil.cloneValue(base);
+        // We inserted the known byte array before appending the serialized list of dynamic columns
+        final byte[] anchor = Arrays.copyOf(DYN_COLS_METADATA_CELL_QUALIFIER,
+                DYN_COLS_METADATA_CELL_QUALIFIER.length);
+        // Reverse the arrays to find the last occurrence of the sub-array in the value array.
+        // NOTE(review): assumes the anchor byte pattern cannot legitimately occur inside the
+        // serialized metadata itself — confirm writer-side guarantees
+        ArrayUtils.reverse(valueArray);
+        ArrayUtils.reverse(anchor);
+        // pos is the index (in original orientation) just past the end of the anchor;
+        // Bytes.indexOf returning -1 (anchor absent) makes pos == length + 1
+        final int pos = valueArray.length - Bytes.indexOf(valueArray, anchor);
+        // There are no dynamic columns to process so return immediately
+        // (covers both "anchor not found" and "anchor found with an empty trailing list")
+        if (pos >= valueArray.length) {
+            return null;
+        }
+        // Restore original byte order before slicing (valueArray is a private clone)
+        ArrayUtils.reverse(valueArray);
+
+        // Separate the serialized list of dynamic column PColumns from the actual cell data
+        byte[] actualCellDataBytes = Arrays.copyOfRange(valueArray, 0,
+                pos - DYN_COLS_METADATA_CELL_QUALIFIER.length);
+        ImmutableBytesWritable actualCellData = new ImmutableBytesWritable(actualCellDataBytes);
+        ImmutableBytesWritable key = new ImmutableBytesWritable();
+        currentRow.getKey(key);
+        // Store only the actual cell data as part of the current row
+        this.currentRow = new TupleProjector.ProjectedValueTuple(key.get(), key.getOffset(),
+                key.getLength(), base.getTimestamp(),
+                actualCellData.get(), actualCellData.getOffset(), actualCellData.getLength(), 0);
+
+        // Everything after the anchor is the protobuf-serialized dynamic column list
+        byte[] dynColsListBytes = Arrays.copyOfRange(valueArray, pos, valueArray.length);
+        List<PColumn> dynCols = new ArrayList<>();
+        try {
+            List<PTableProtos.PColumn> dynColsProtos = DynamicColumnMetaDataProtos
+                    .DynamicColumnMetaData.parseFrom(dynColsListBytes).getDynamicColumnsList();
+            for (PTableProtos.PColumn colProto : dynColsProtos) {
+                dynCols.add(PColumnImpl.createFromProto(colProto));
+            }
+        } catch (InvalidProtocolBufferException e) {
+            // NOTE(review): parse failure is swallowed and treated as "no dynamic columns";
+            // consider logging before returning null
+            return null;
+        }
+        return dynCols;
+    }
+
+    /**
+     * Add the dynamic column projectors at the end of the current row's row projector.
+     * Relies on the instance fields {@code staticColumns} and {@code startPositionForDynamicCols}
+     * (presumably cached from getStaticColsAndStartingPosForDynCols() — confirm where they are
+     * populated).
+     * @param dynCols list of dynamic column PColumns for the current row
+     * @param tableName table name used for the new dynamic column projectors
+     * @return The combined row projector containing column projectors for both static and dynamic
+     * columns
+     */
+    private RowProjector mergeRowProjectorWithDynColProjectors(List<PColumn> dynCols,
+            String tableName) {
+        // Start from a copy so the original static-only projector list is never mutated
+        List<ColumnProjector> allColumnProjectors =
+                new ArrayList<>(this.rowProjector.getColumnProjectors());
+        List<PColumn> allCols = new ArrayList<>();
+        if (this.staticColumns != null) {
+            allCols.addAll(this.staticColumns);
+        }
+        // Add dynamic columns to the end
+        allCols.addAll(dynCols);
+
+        int startingPos = this.startPositionForDynamicCols;
+        // Get the ProjectedColumnExpressions for dynamic columns
+        for (PColumn currentDynCol : dynCols) {
+            // Note that we refer to all the existing static columns along with all dynamic columns
+            // in each of the newly added dynamic column projectors.
+            // This is required for correctly building the schema for each of the dynamic columns
+            Expression exp = new ProjectedColumnExpression(currentDynCol, allCols,
+                    startingPos++, currentDynCol.getName().getString());
+
+            // NOTE(review): the final boolean arg is presumably 'isCaseSensitive' — confirm
+            // against the ExpressionProjector constructor
+            ColumnProjector dynColProj = new ExpressionProjector(
+                    currentDynCol.getName().getString(), tableName, exp, false);
+            allColumnProjectors.add(dynColProj);
+        }
+
+        // Preserve every flag of the original projector; only the projector list grows
+        return new RowProjector(allColumnProjectors, this.rowProjector.getEstimatedRowByteSize(),
+                this.rowProjector.projectEveryRow(), this.rowProjector.hasUDFs(),
+                this.rowProjector.projectEverything(),
+                this.rowProjector.projectDynColsInWildcardQueries());
+    }
+
 }
index 8b8133e..7e76d10 100644 (file)
@@ -487,7 +487,10 @@ public class PhoenixStatement implements Statement, SQLCloseable {
                 select = StatementNormalizer.normalize(transformedSelect, resolver);
             }
 
-            QueryPlan plan = new QueryCompiler(stmt, select, resolver, Collections.<PDatum>emptyList(), stmt.getConnection().getIteratorFactory(), new SequenceManager(stmt), true, false, null).compile();
+            QueryPlan plan = new QueryCompiler(stmt, select, resolver, Collections.emptyList(),
+                    stmt.getConnection().getIteratorFactory(), new SequenceManager(stmt),
+                    true, false, null)
+                    .compile();
             plan.getContext().getSequenceManager().validateSequences(seqAction);
             return plan;
         }
index f33fb81..1f5cd48 100644 (file)
@@ -934,6 +934,7 @@ public class ConnectionQueryServicesImpl extends DelegateQueryServices implement
                     }
                 }
             }
+
             if ((SchemaUtil.isStatsTable(tableName) || SchemaUtil.isMetaTable(tableName))
                     && !newDesc.hasCoprocessor(MultiRowMutationEndpoint.class.getName())) {
                 builder.addCoprocessor(MultiRowMutationEndpoint.class.getName(),
index 23cf292..e279f05 100644 (file)
@@ -316,6 +316,8 @@ public interface QueryServices extends SQLCloseable {
     // Whether to enable cost-based-decision in the query optimizer
     public static final String COST_BASED_OPTIMIZER_ENABLED = "phoenix.costbased.optimizer.enabled";
     public static final String SMALL_SCAN_THRESHOLD_ATTRIB = "phoenix.query.smallScanThreshold";
+    public static final String WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB =
+            "phoenix.query.wildcard.dynamicColumns";
     public static final String LOG_LEVEL = "phoenix.log.level";
     public static final String LOG_BUFFER_SIZE = "phoenix.log.buffer.size";
     public static final String LOG_BUFFER_WAIT_STRATEGY = "phoenix.log.wait.strategy";
index c507630..b8cfe1f 100644 (file)
@@ -369,6 +369,7 @@ public class QueryServicesOptions {
     public static final boolean DEFAULT_ENABLE_SERVER_SIDE_MUTATIONS = true;
 
     public static final boolean DEFAULT_COST_BASED_OPTIMIZER_ENABLED = false;
+    public static final boolean DEFAULT_WILDCARD_QUERY_DYNAMIC_COLS_ATTRIB = false;
     public static final String DEFAULT_LOGGING_LEVEL = LogLevel.OFF.name();
     public static final String DEFAULT_LOG_SAMPLE_RATE = "1.0";
     public static final int DEFAULT_LOG_SALT_BUCKETS = 32;
index fde83ba..6dce7df 100644 (file)
@@ -23,6 +23,8 @@ import java.util.Map;
 import org.apache.hadoop.hbase.client.Mutation;
 
 import com.google.common.collect.ImmutableMap;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
+import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
 
 /**
  * 
@@ -55,7 +57,30 @@ public interface PRow {
      * constraint
      */
     public void setValue(PColumn col, byte[] value);
-    
+
+    /**
+     * Set attributes for the Put operations involving dynamic columns. These attributes are
+     * persisted as cells under a reserved qualifier for the dynamic column metadata so that we
+     * can resolve them for wildcard queries without requiring the user to provide the data type
+     * of the dynamic columns. See PHOENIX-374
+     * @return true if attributes for dynamic columns are added (i.e. the row writes at least one
+     * dynamic column), otherwise false
+     */
+    public boolean setAttributesForDynamicColumnsIfReqd();
+
+    /**
+     * Set an attribute to indicate that we must process dynamic column metadata for the mutation.
+     * This is set if the configuration for supporting dynamic columns in wildcard queries is on
+     * and there are actually dynamic columns for which we need to add metadata.
+     * In case of old clients or for clients where this configuration is off, or for clients where
+     * this configuration is on and there are no dynamic columns to process in the mutation, this
+     * attribute will not be set.
+     * If this attribute is not set, we can avoid unnecessary iterations over each mutation's
+     * column families. See
+     * {@link org.apache.phoenix.coprocessor.ScanRegionObserver#preBatchMutate(ObserverContext,
+     * MiniBatchOperationInProgress)}
+     */
+    public void setAttributeToProcessDynamicColumnsMetadata();
+
     /**
      * Delete the row. Note that a delete take precedence over any
      * values that may have been set before or after the delete call.
index 6143bac..a7936e0 100644 (file)
  */
 package org.apache.phoenix.schema;
 
+import static org.apache.phoenix.coprocessor.ScanRegionObserver.DYNAMIC_COLUMN_METADATA_STORED_FOR_MUTATION;
 import static org.apache.phoenix.hbase.index.util.KeyValueBuilder.addQuietly;
 import static org.apache.phoenix.hbase.index.util.KeyValueBuilder.deleteQuietly;
 import static org.apache.phoenix.schema.SaltingUtil.SALTING_COLUMN;
+import static org.apache.phoenix.schema.types.PDataType.TRUE_BYTES;
 
 import java.io.IOException;
 import java.sql.DriverManager;
@@ -45,12 +47,15 @@ import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
 import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.phoenix.compile.ExpressionCompiler;
 import org.apache.phoenix.compile.StatementContext;
+import org.apache.phoenix.coprocessor.generated.DynamicColumnMetaDataProtos;
 import org.apache.phoenix.coprocessor.generated.PTableProtos;
 import org.apache.phoenix.exception.DataExceedsCapacityException;
 import org.apache.phoenix.expression.Expression;
@@ -1124,9 +1129,13 @@ public class PTableImpl implements PTable {
         private final long ts;
         private final boolean hasOnDupKey;
         // map from column name to value 
-        private Map<PColumn, byte[]> columnToValueMap; 
+        private Map<PColumn, byte[]> columnToValueMap;
+        // Map from the column family name to the list of dynamic columns in that column family.
+        // If there are no dynamic columns in a column family, the key for that column family
+        // will not exist in the map, rather than the corresponding value being an empty list.
+        private Map<String, List<PColumn>> colFamToDynamicColumnsMapping;
 
-        public PRowImpl(KeyValueBuilder kvBuilder, ImmutableBytesWritable key, long ts, Integer bucketNum, boolean hasOnDupKey) {
+        PRowImpl(KeyValueBuilder kvBuilder, ImmutableBytesWritable key, long ts, Integer bucketNum, boolean hasOnDupKey) {
             this.kvBuilder = kvBuilder;
             this.ts = ts;
             this.hasOnDupKey = hasOnDupKey;
@@ -1138,6 +1147,7 @@ public class PTableImpl implements PTable {
                 this.key = ByteUtil.copyKeyBytesIfNecessary(key);
             }
             this.columnToValueMap = Maps.newHashMapWithExpectedSize(1);
+            this.colFamToDynamicColumnsMapping = Maps.newHashMapWithExpectedSize(1);
             newMutations();
         }
 
@@ -1188,16 +1198,21 @@ public class PTableImpl implements PTable {
                         ImmutableBytesWritable ptr = new ImmutableBytesWritable();
                         singleCellConstructorExpression.evaluate(null, ptr);
                         ImmutableBytesPtr colFamilyPtr = new ImmutableBytesPtr(columnFamily);
-                        addQuietly(put, kvBuilder, kvBuilder.buildPut(keyPtr,
+                        addQuietly(put, kvBuilder.buildPut(keyPtr,
                             colFamilyPtr, QueryConstants.SINGLE_KEYVALUE_COLUMN_QUALIFIER_BYTES_PTR, ts, ptr));
                     }
+                    // Preserve the attributes of the original mutation
+                    Map<String, byte[]> attrsMap = setValues.getAttributesMap();
                     setValues = put;
+                    for (String attrKey : attrsMap.keySet()) {
+                        setValues.setAttribute(attrKey, attrsMap.get(attrKey));
+                    }
                 }
                 // Because we cannot enforce a not null constraint on a KV column (since we don't know if the row exists when
                 // we upsert it), so instead add a KV that is always empty. This allows us to imitate SQL semantics given the
                 // way HBase works.
                 Pair<byte[], byte[]> emptyKvInfo = EncodedColumnsUtil.getEmptyKeyValueInfo(PTableImpl.this);
-                addQuietly(setValues, kvBuilder, kvBuilder.buildPut(keyPtr,
+                addQuietly(setValues, kvBuilder.buildPut(keyPtr,
                     SchemaUtil.getEmptyColumnFamilyPtr(PTableImpl.this),
                     new ImmutableBytesPtr(emptyKvInfo.getFirst()), ts,
                     new ImmutableBytesPtr(emptyKvInfo.getSecond())));
@@ -1270,11 +1285,50 @@ public class PTableImpl implements PTable {
                 }
                 else {
                     removeIfPresent(unsetValues, family, qualifier);
-                    addQuietly(setValues, kvBuilder, kvBuilder.buildPut(keyPtr,
+                    addQuietly(setValues, kvBuilder.buildPut(keyPtr,
                         column.getFamilyName().getBytesPtr(), qualifierPtr,
                         ts, ptr));
                 }
+                String fam = Bytes.toString(family);
+                if (column.isDynamic()) {
+                    this.colFamToDynamicColumnsMapping.putIfAbsent(fam, new ArrayList<>());
+                    this.colFamToDynamicColumnsMapping.get(fam).add(column);
+                }
+            }
+        }
+
+        /**
+         * Add attributes to the Put mutations indicating that we need to add shadow cells to Puts
+         * to store dynamic column metadata. See
+         * {@link org.apache.phoenix.coprocessor.ScanRegionObserver#preBatchMutate(ObserverContext,
+         * MiniBatchOperationInProgress)}
+         */
+        public boolean setAttributesForDynamicColumnsIfReqd() {
+            if (this.colFamToDynamicColumnsMapping == null ||
+                    this.colFamToDynamicColumnsMapping.isEmpty()) {
+                return false;
+            }
+            boolean attrsForDynColsSet = false;
+            for (Entry<String, List<PColumn>> colFamToDynColsList :
+                    this.colFamToDynamicColumnsMapping.entrySet()) {
+                DynamicColumnMetaDataProtos.DynamicColumnMetaData.Builder builder =
+                        DynamicColumnMetaDataProtos.DynamicColumnMetaData.newBuilder();
+                for (PColumn dynCol : colFamToDynColsList.getValue()) {
+                    builder.addDynamicColumns(PColumnImpl.toProto(dynCol));
+                }
+                if (builder.getDynamicColumnsCount() != 0) {
+                    // The attribute key is the column family name and the value is the
+                    // serialized list of dynamic columns
+                    setValues.setAttribute(colFamToDynColsList.getKey(),
+                            builder.build().toByteArray());
+                    attrsForDynColsSet = true;
+                }
             }
+            return attrsForDynColsSet;
+        }
+
+        // Flags the Put so the server side knows dynamic column metadata was stored for this
+        // mutation and must be processed; see PRow#setAttributeToProcessDynamicColumnsMetadata
+        @Override public void setAttributeToProcessDynamicColumnsMetadata() {
+            setValues.setAttribute(DYNAMIC_COLUMN_METADATA_STORED_FOR_MUTATION, TRUE_BYTES);
         }
 
         @Override
index 8028eb2..058c0e4 100644 (file)
@@ -38,6 +38,11 @@ public abstract class BaseTuple implements Tuple {
     public void getKey(ImmutableBytesWritable ptr) {
         throw new UnsupportedOperationException();
     }
+
+    // Default for Tuple implementations that do not support appending the serialized dynamic
+    // column list to a cell value; concrete tuples that support it must override this.
+    @Override
+    public Cell mergeWithDynColsListBytesAndGetValue(int index, byte[] dynColsList) {
+        throw new UnsupportedOperationException();
+    }
     
     @Override
     public Cell getValue(int index) {
index 3430f5b..7cd3acc 100644 (file)
@@ -45,6 +45,11 @@ public class DelegateTuple implements Tuple {
     }
 
+    // Pure pass-through: the wrapped tuple performs the dynamic-column merge.
     @Override
+    public Cell mergeWithDynColsListBytesAndGetValue(int index, byte[] dynColsList) {
+        return delegate.mergeWithDynColsListBytesAndGetValue(index, dynColsList);
+    }
+
+    @Override
     public Cell getValue(int index) {
         return delegate.getValue(index);
     }
index e4a887b..d42cd2d 100644 (file)
@@ -51,7 +51,17 @@ public interface Tuple {
      * the key buffer.
      */
     public void getKey(ImmutableBytesWritable ptr);
-    
+
+    /**
+     * Get the KeyValue at the given index whose value is concatenated with the serialized list of
+     * dynamic column PColumns for that row key.
+     * @param index the zero-based KeyValue index between 0 and {@link #size()} exclusive
+     * @param dynColsList the serialized list of dynamic column PColumns
+     * @return the KeyValue at the given index, with dynColsList appended to its value
+     * @throws IndexOutOfBoundsException if an invalid index is used
+     * @throws UnsupportedOperationException if the implementation does not support this merge
+     */
+    public Cell mergeWithDynColsListBytesAndGetValue(int index, byte[] dynColsList);
+
     /**
      * Get the KeyValue at the given index.
      * @param index the zero-based KeyValue index between 0 and {@link #size()} exclusive
index e67ddeb..d5e3db0 100644 (file)
@@ -64,9 +64,9 @@ public class EncodedColumnsUtil {
         }
     }
     
-    public static final boolean useNewValueColumnQualifier(Scan s) {
+    /**
+     * @param s the scan to inspect
+     * @return true if the scan carries the USE_NEW_VALUE_COLUMN_QUALIFIER attribute
+     */
+    public static boolean useNewValueColumnQualifier(Scan s) {
         // null check for backward compatibility
-        return s.getAttribute(BaseScannerRegionObserver.USE_NEW_VALUE_COLUMN_QUALIFIER) == null ? false : true;
+        return s.getAttribute(BaseScannerRegionObserver.USE_NEW_VALUE_COLUMN_QUALIFIER) != null;
     }
     
     public static QualifierEncodingScheme getQualifierEncodingScheme(Scan s) {
index a35409f..4b06d46 100644 (file)
@@ -460,7 +460,7 @@ public class MetaDataUtil {
             for (Cell cell : kvs) {
                 KeyValue kv = org.apache.hadoop.hbase.KeyValueUtil.ensureKeyValue(cell);
                 if (builder.compareQualifier(kv, key, 0, key.length) ==0) {
-                    KeyValueBuilder.addQuietly(headerRow, builder, keyValue);
+                    KeyValueBuilder.addQuietly(headerRow, keyValue);
                     return true;
                 }
             }
index 85883ec..d4f1177 100644 (file)
@@ -1154,7 +1154,7 @@ public class PhoenixRuntime {
         return values.toArray();
     }
     
-    private static KeyValueSchema buildKeyValueSchema(List<PColumn> columns) {
+    public static KeyValueSchema buildKeyValueSchema(List<PColumn> columns) {
         KeyValueSchemaBuilder builder = new KeyValueSchemaBuilder(getMinNullableIndex(columns));
         for (PColumn col : columns) {
             builder.addField(col);
index 783417d..b16e401 100644 (file)
@@ -174,7 +174,7 @@ public class MetaDataUtilTest {
     KeyValue kv = builder.buildPut(wrap(ROW), wrap(TABLE_FAMILY_BYTES), wrap(QUALIFIER),
             wrap(ORIGINAL_VALUE));
     Put put = new Put(ROW);
-    KeyValueBuilder.addQuietly(put, builder, kv);
+    KeyValueBuilder.addQuietly(put, kv);
 
     // read back out the value
     ImmutableBytesPtr ptr = new ImmutableBytesPtr();
@@ -189,7 +189,7 @@ public class MetaDataUtilTest {
         byte[] value = Bytes.toBytes("client-value");
         kv = builder.buildPut(wrap(ROW), wrap(TABLE_FAMILY_BYTES), wrap(QUALIFIER), wrap(value));
         put = new Put(ROW);
-        KeyValueBuilder.addQuietly(put, builder, kv);
+        KeyValueBuilder.addQuietly(put, kv);
     
         // read back out the value
         assertTrue(MetaDataUtil.getMutationValue(put, QUALIFIER, builder, ptr));
@@ -244,7 +244,7 @@ public class MetaDataUtilTest {
         KeyValue kv = builder.buildPut(wrap(ROW), wrap(TABLE_FAMILY_BYTES), wrap(QUALIFIER),
                 wrap(ORIGINAL_VALUE));
         Put put = new Put(ROW);
-        KeyValueBuilder.addQuietly(put, builder, kv);
+        KeyValueBuilder.addQuietly(put, kv);
         return put;
     }
 
diff --git a/phoenix-protocol/src/main/DynamicColumnMetaData.proto b/phoenix-protocol/src/main/DynamicColumnMetaData.proto
new file mode 100644 (file)
index 0000000..33466ee
--- /dev/null
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+option java_package = "org.apache.phoenix.coprocessor.generated";
+option java_outer_classname = "DynamicColumnMetaDataProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+import "PTable.proto";
+
+// Serialized list of dynamic column definitions (PColumn protos) attached to a mutation so
+// wildcard queries can resolve dynamic columns without user-supplied types. See PHOENIX-374.
+message DynamicColumnMetaData {
+    repeated PColumn dynamicColumns = 1;
+}
\ No newline at end of file