Example usage for java.sql Statement unwrap

Introduction

On this page you can find example usages of java.sql.Statement#unwrap.

Prototype

<T> T unwrap(java.lang.Class<T> iface) throws java.sql.SQLException;

Document

Returns an object that implements the given interface to allow access to non-standard methods, or standard methods not exposed by the proxy.
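Before the project-specific examples, here is a minimal sketch of the typical pattern: check Wrapper#isWrapperFor before calling unwrap, so the call does not fail when the driver cannot expose the requested interface. The jdbc:example URL and the VendorStatement interface below are hypothetical placeholders, not part of any real driver.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class UnwrapExample {

    // Placeholder for a driver-specific statement interface,
    // e.g. PhoenixStatement in the Apache Phoenix examples below.
    interface VendorStatement extends Statement {
    }

    public static void main(String[] args) throws SQLException {
        // Hypothetical JDBC URL; substitute your driver's URL here.
        try (Connection connection = DriverManager.getConnection("jdbc:example://localhost/db");
             Statement statement = connection.createStatement()) {
            // isWrapperFor avoids an SQLException when the driver does not
            // implement or wrap the requested interface.
            if (statement.isWrapperFor(VendorStatement.class)) {
                VendorStatement vendor = statement.unwrap(VendorStatement.class);
                // Call the driver's non-standard methods on vendor here.
            }
        }
    }
}

If the statement does not wrap the requested interface, unwrap throws an SQLException; this is why the examples below pass driver-specific classes such as PhoenixStatement.class that the driver is known to implement.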

Usage

From source file:com.micromux.cassandra.jdbc.JdbcRegressionTest.java

private CassandraStatementExtras statementExtras(Statement statement) throws Exception {
    // Load the driver-specific interface reflectively and unwrap the statement to it.
    Class<?> cse = Class.forName("com.micromux.cassandra.jdbc.CassandraStatementExtras");
    return (CassandraStatementExtras) statement.unwrap(cse);
}

From source file:org.apache.phoenix.hive.HivePhoenixInputFormat.java

private QueryPlan getQueryPlan() throws IOException {
    try {
        LOG.debug("PhoenixInputFormat getQueryPlan statement " + this.configuration.get("phoenix.select.stmt"));
        Connection connection = getConnection();
        String selectStatement = PhoenixConfigurationUtil.getSelectStatement(this.configuration);
        Preconditions.checkNotNull(selectStatement);
        Statement statement = connection.createStatement();
        PhoenixStatement pstmt = statement.unwrap(PhoenixStatement.class);
        this.queryPlan = pstmt.compileQuery(selectStatement);
        this.queryPlan.iterator();
    } catch (Exception exception) {
        LOG.error(String.format("Failed to get the query plan with error [%s]", exception.getMessage()));
        throw new RuntimeException(exception);
    }
    return this.queryPlan;
}

From source file:org.apache.phoenix.hive.mapreduce.PhoenixInputFormat.java

/**
 * Returns the query plan associated with the select query.
 */
private QueryPlan getQueryPlan(final Configuration configuration, String selectStatement) throws IOException {
    try {
        final String currentScnValue = configuration.get(PhoenixConfigurationUtil.CURRENT_SCN_VALUE);
        final Properties overridingProps = new Properties();
        if (currentScnValue != null) {
            overridingProps.put(PhoenixRuntime.CURRENT_SCN_ATTRIB, currentScnValue);
        }
        final Connection connection = PhoenixConnectionUtil.getInputConnection(configuration, overridingProps);
        Preconditions.checkNotNull(selectStatement);
        final Statement statement = connection.createStatement();
        final PhoenixStatement pstmt = statement.unwrap(PhoenixStatement.class);

        if (LOG.isDebugEnabled()) {
            LOG.debug("Compiled query : " + selectStatement);
        }

        // Optimize the query plan so that we potentially use secondary indexes
        final QueryPlan queryPlan = pstmt.optimizeQuery(selectStatement);
        // Initialize the query plan so it sets up the parallel scans
        queryPlan.iterator(MapReduceParallelScanGrouper.getInstance());
        return queryPlan;
    } catch (Exception exception) {
        LOG.error(String.format("Failed to get the query plan with error [%s]", exception.getMessage()));
        throw new RuntimeException(exception);
    }
}

From source file:org.apache.phoenix.mapreduce.PhoenixInputFormat.java

/**
 * Returns the query plan associated with the select query.
 * @param context the MapReduce job context
 * @param configuration the job configuration carrying the select statement
 * @return the compiled and optimized query plan
 * @throws IOException if the query plan cannot be built
 */
private QueryPlan getQueryPlan(final JobContext context, final Configuration configuration) throws IOException {
    Preconditions.checkNotNull(context);
    try {
        final String currentScnValue = configuration.get(PhoenixConfigurationUtil.CURRENT_SCN_VALUE);
        final Properties overridingProps = new Properties();
        if (currentScnValue != null) {
            overridingProps.put(PhoenixRuntime.CURRENT_SCN_ATTRIB, currentScnValue);
        }
        final Connection connection = ConnectionUtil.getInputConnection(configuration, overridingProps);
        final String selectStatement = PhoenixConfigurationUtil.getSelectStatement(configuration);
        Preconditions.checkNotNull(selectStatement);
        final Statement statement = connection.createStatement();
        final PhoenixStatement pstmt = statement.unwrap(PhoenixStatement.class);
        // Optimize the query plan so that we potentially use secondary indexes            
        final QueryPlan queryPlan = pstmt.optimizeQuery(selectStatement);
        // Initialize the query plan so it sets up the parallel scans
        queryPlan.iterator(MapReduceParallelScanGrouper.getInstance());
        return queryPlan;
    } catch (Exception exception) {
        LOG.error(String.format("Failed to get the query plan with error [%s]", exception.getMessage()));
        throw new RuntimeException(exception);
    }
}

From source file:org.apache.phoenix.pig.hadoop.PhoenixInputFormat.java

/**
 * Returns the query plan associated with the select query.
 * @param context the MapReduce job context
 * @return the compiled and optimized query plan
 * @throws IOException if the query plan cannot be built
 */
private QueryPlan getQueryPlan(final JobContext context) throws IOException {
    Preconditions.checkNotNull(context);
    if (queryPlan == null) {
        try {
            final Connection connection = getConnection();
            final String selectStatement = getConf().getSelectStatement();
            Preconditions.checkNotNull(selectStatement);
            final Statement statement = connection.createStatement();
            final PhoenixStatement pstmt = statement.unwrap(PhoenixStatement.class);
            // Optimize the query plan so that we potentially use secondary indexes
            this.queryPlan = pstmt.optimizeQuery(selectStatement);
            // Initialize the query plan so it sets up the parallel scans
            queryPlan.iterator();
        } catch (Exception exception) {
            LOG.error(String.format("Failed to get the query plan with error [%s]", exception.getMessage()));
            throw new RuntimeException(exception);
        }
    }
    return queryPlan;
}

From source file:org.apache.phoenix.pig.util.QuerySchemaParserFunction.java

@Override
public Pair<String, String> apply(final String selectStatement) {
    Preconditions.checkNotNull(selectStatement);
    Preconditions.checkArgument(!selectStatement.isEmpty(), "Select Query is empty!!");
    Connection connection = null;
    try {
        connection = ConnectionUtil.getInputConnection(this.configuration);
        final Statement statement = connection.createStatement();
        final PhoenixStatement pstmt = statement.unwrap(PhoenixStatement.class);
        final QueryPlan queryPlan = pstmt.compileQuery(selectStatement);
        isValidStatement(queryPlan);
        final String tableName = queryPlan.getTableRef().getTable().getName().getString();
        final List<? extends ColumnProjector> projectedColumns = queryPlan.getProjector().getColumnProjectors();
        final List<String> columns = Lists.transform(projectedColumns, new Function<ColumnProjector, String>() {
            @Override
            public String apply(ColumnProjector column) {
                return column.getName();
            }
        });
        final String columnsAsStr = Joiner.on(",").join(columns);
        return new Pair<String, String>(tableName, columnsAsStr);
    } catch (SQLException e) {
        LOG.error(String.format(" Error [%s] parsing SELECT query [%s] ", e.getMessage(), selectStatement));
        throw new RuntimeException(e);
    } finally {
        if (connection != null) {
            try {
                connection.close();
            } catch (SQLException sqle) {
                LOG.error(" Error closing connection ");
                throw new RuntimeException(sqle);
            }
        }
    }
}

From source file:org.apache.phoenix.pig.util.SqlQueryToColumnInfoFunction.java

@Override
public List<ColumnInfo> apply(String sqlQuery) {
    Preconditions.checkNotNull(sqlQuery);
    Connection connection = null;
    List<ColumnInfo> columnInfos = null;
    try {
        connection = ConnectionUtil.getInputConnection(this.configuration);
        final Statement statement = connection.createStatement();
        final PhoenixStatement pstmt = statement.unwrap(PhoenixStatement.class);
        final QueryPlan queryPlan = pstmt.compileQuery(sqlQuery);
        final List<? extends ColumnProjector> projectedColumns = queryPlan.getProjector().getColumnProjectors();
        columnInfos = Lists.transform(projectedColumns, new Function<ColumnProjector, ColumnInfo>() {
            @Override
            public ColumnInfo apply(final ColumnProjector columnProjector) {
                return new ColumnInfo(columnProjector.getName(),
                        columnProjector.getExpression().getDataType().getSqlType());
            }

        });
    } catch (SQLException e) {
        LOG.error(String.format(" Error [%s] parsing SELECT query [%s] ", e.getMessage(), sqlQuery));
        throw new RuntimeException(e);
    } finally {
        if (connection != null) {
            try {
                connection.close();
            } catch (SQLException sqle) {
                LOG.error("Error closing connection!!");
                throw new RuntimeException(sqle);
            }
        }
    }
    return columnInfos;
}