Example usage for java.sql Connection createArrayOf

List of usage examples for java.sql Connection createArrayOf

Introduction

On this page you can find example usage of java.sql Connection createArrayOf, collected from the source files listed under Usage below.

Prototype

Array createArrayOf(String typeName, Object[] elements) throws SQLException;

Document

Factory method for creating Array objects.
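
The snippet below is a minimal, self-contained sketch of the call, not taken from any of the projects listed under Usage: the "user_scores" table, its columns, and the "integer" type name are illustrative placeholders, and the set of accepted type names is driver-specific (the examples below pass names such as "text", "int4", and "varchar").

import java.sql.Array;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class CreateArrayOfExample {
    // Builds a java.sql.Array from an Integer[] and binds it to an INSERT parameter.
    // Both the "user_scores" table and the "integer" type name are assumptions for this sketch.
    public static void insertScores(Connection con, int userId, Integer[] scores) throws SQLException {
        Array sqlArray = con.createArrayOf("integer", scores);
        try (PreparedStatement ps = con.prepareStatement(
                "INSERT INTO user_scores (user_id, scores) VALUES (?, ?)")) {
            ps.setInt(1, userId);
            ps.setArray(2, sqlArray);
            ps.executeUpdate();
        } finally {
            sqlArray.free(); // release any driver-side resources held by the Array
        }
    }
}

When the array value may be absent, the projects below typically bind NULL with setNull(parameterIndex, Types.ARRAY) rather than calling createArrayOf; see the ProfileDao example.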

Usage

From source file: ru.org.linux.user.ProfileDao.java

public void writeProfile(@Nonnull final User user, @Nonnull final Profile profile) {
    String boxlets[] = null;

    List<String> customBoxlets = profile.getCustomBoxlets();

    if (customBoxlets != null) {
        boxlets = customBoxlets.toArray(new String[customBoxlets.size()]);
    }

    final String[] finalBoxlets = boxlets;
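    // Try to UPDATE the existing row first; if no row was affected, fall back to the INSERT below.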
    if (jdbcTemplate.update(new PreparedStatementCreator() {
        @Override
        public PreparedStatement createPreparedStatement(Connection con) throws SQLException {
            PreparedStatement st = con
                    .prepareStatement("UPDATE user_settings SET settings=?, main=? WHERE id=?");

            st.setObject(1, profile.getSettings());

            if (finalBoxlets != null) {
                st.setArray(2, con.createArrayOf("text", finalBoxlets));
            } else {
                st.setNull(2, Types.ARRAY);
            }

            st.setInt(3, user.getId());

            return st;
        }
    }) == 0) {
        jdbcTemplate.update(new PreparedStatementCreator() {
            @Override
            public PreparedStatement createPreparedStatement(Connection con) throws SQLException {
                PreparedStatement st = con
                        .prepareStatement("INSERT INTO user_settings (id, settings, main) VALUES (?,?,?)");

                st.setInt(1, user.getId());

                st.setObject(2, profile.getSettings());

                if (finalBoxlets != null) {
                    st.setArray(3, con.createArrayOf("text", finalBoxlets));
                } else {
                    st.setNull(3, Types.ARRAY);
                }

                return st;
            }
        });
    }
}

From source file: io.lightlink.types.ArrayConverter.java

@Override
public Object convertToJdbc(Connection connection, RunnerContext runnerContext, String name, Object value)
        throws IOException, SQLException {

    if (value == null)
        return null;

    // Normalize Lists, Strings, and Numbers into an Object[] before delegating to createArrayOf.
    if (value instanceof List)
        value = ((List) value).toArray();
    else if (value instanceof String) {
        value = StringUtils.isBlank((String) value) ? new Object[0] : new Object[] { value };
    } else if (value instanceof Number) {
        value = new Object[] { value };
    }
    return connection.createArrayOf(type, (Object[]) value);

}

From source file: com.adaptris.jdbc.connection.FailoverDatasourceTest.java

@Test
public void testTypes() throws Exception {
    Connection conn = new MyProxy();

    try {
        // Each type-factory method is invoked on the proxied connection; exceptions are
        // swallowed by the empty catch blocks below, and the connection is closed in the finally.
        try {
            conn.createBlob();
        } catch (Exception e) {

        }
        try {
            conn.createClob();
        } catch (Exception e) {

        }
        try {
            conn.createNClob();
        } catch (Exception e) {

        }
        try {
            conn.createSQLXML();
        } catch (Exception e) {

        }
        try {
            conn.createStruct("java.lang.String", new String[] { "hello"

            });
        } catch (Exception e) {
        }
        try {
            conn.createArrayOf("java.lang.String", new String[] { "hello", "world" });
        } catch (Exception e) {
        }
    } finally {
        JdbcUtil.closeQuietly(conn);
    }
}

From source file: com.streamsets.pipeline.lib.jdbc.JdbcMultiRowRecordWriter.java

@SuppressWarnings("unchecked")
private void processPartition(Connection connection, Multimap<Long, Record> partitions, Long partitionKey,
        List<OnRecordErrorException> errorRecords) throws SQLException, OnRecordErrorException {
    Collection<Record> partition = partitions.get(partitionKey);
    // Fetch the base insert query for this partition.
    SortedMap<String, String> columnsToParameters = getFilteredColumnsToParameters(getColumnsToParameters(),
            partition.iterator().next());

    // put all the records in a queue for consumption
    LinkedList<Record> queue = new LinkedList<>(partition);

    // compute number of rows per batch
    if (columnsToParameters.isEmpty()) {
        throw new OnRecordErrorException(Errors.JDBCDEST_22);
    }
    int maxRowsPerBatch = maxPrepStmtParameters / columnsToParameters.size();

    PreparedStatement statement = null;

    // parameters are indexed starting with 1
    int paramIdx = 1;
    int rowCount = 0;
    while (!queue.isEmpty()) {
        // we're at the start of a batch.
        if (statement == null) {
            // instantiate the new statement
            statement = generatePreparedStatement(columnsToParameters,
                    // the next batch will have either the max number of records, or however many are left.
                    Math.min(maxRowsPerBatch, queue.size()), getTableName(), connection);
        }

        // process the next record into the current statement
        Record record = queue.removeFirst();
        for (String column : columnsToParameters.keySet()) {
            Field field = record.get(getColumnsToFields().get(column));
            Field.Type fieldType = field.getType();
            Object value = field.getValue();

            try {
                switch (fieldType) {
                case LIST:
                    List<Object> unpackedList = unpackList((List<Field>) value);
                    Array array = connection.createArrayOf(getSQLTypeName(fieldType), unpackedList.toArray());
                    statement.setArray(paramIdx, array);
                    break;
                case DATE:
                case DATETIME:
                    // Java Date types are not accepted by JDBC drivers, so we need to convert to java.sql.Date
                    java.util.Date date = field.getValueAsDatetime();
                    statement.setObject(paramIdx, new java.sql.Date(date.getTime()));
                    break;
                default:
                    statement.setObject(paramIdx, value, getColumnType(column));
                    break;
                }
            } catch (SQLException e) {
                LOG.error(Errors.JDBCDEST_23.getMessage(), column, fieldType.toString(), e);
                throw new OnRecordErrorException(record, Errors.JDBCDEST_23, column, fieldType.toString());
            }
            ++paramIdx;
        }

        rowCount++;

        // check if we've filled up the current batch
        if (rowCount == maxRowsPerBatch) {
            // time to execute the current batch
            statement.addBatch();
            statement.executeBatch();
            statement.close();
            statement = null;

            // reset our counters
            rowCount = 0;
            paramIdx = 1;
        }
    }

    // check if there are any records left. this should occur whenever there isn't *exactly* maxRowsPerBatch records in
    // this partition.
    if (statement != null) {
        statement.addBatch();
        statement.executeBatch();
        statement.close();
    }
}

From source file: de.whs.poodle.repositories.ExerciseRepository.java

public void save(Exercise exercise) {
    if (exercise.getTitle().trim().isEmpty())
        throw new BadRequestException("noTitleSpecified");
    if (exercise.getText().trim().isEmpty())
        throw new BadRequestException("noExerciseTextSpecified");

    jdbc.execute(new ConnectionCallback<Void>() {

        @Override
        public Void doInConnection(Connection con) throws SQLException, DataAccessException {
            try (CallableStatement exercisePs = con
                    .prepareCall("{ ? = CALL create_exercise(?,?,?::exercise_visibility,?,?,?,?,?,?,?,?,?) }");

                    PreparedStatement tagsPs = con
                            .prepareStatement("INSERT INTO exercise_to_tag(exercise_id,tag_id) VALUES(?,?)");) {
                con.setAutoCommit(false);

                // inner try for rollback
                try {
                    // create exercise
                    exercisePs.registerOutParameter(1, Types.INTEGER); // new_id

                    exercisePs.setString(2, exercise.getText());

                    /*
                     * The root id is always the ID of the first revision. If this
                     * is a new exercise, this ID obviously doesn't exist yet. We set
                     * NULL in this case, but a trigger in the DB will automatically
                     * set the root_id to the generated id.
                     */
                    if (exercise.getRootId() == 0)
                        exercisePs.setNull(3, Types.INTEGER);
                    else
                        exercisePs.setInt(3, exercise.getRootId());

                    exercisePs.setString(4, exercise.getVisibility().toString());
                    exercisePs.setString(5, exercise.getTitle());
                    exercisePs.setInt(6, exercise.getChangedBy().getId());
                    exercisePs.setString(7, exercise.getHint1());
                    exercisePs.setString(8, exercise.getHint2());

                    // sample solution
                    SampleSolutionType sampleSolutionType = exercise.getSampleSolutionType();

                    if (sampleSolutionType == SampleSolutionType.NONE) {
                        exercisePs.setNull(9, Types.INTEGER);
                        exercisePs.setNull(10, Types.VARCHAR);
                    } else if (sampleSolutionType == SampleSolutionType.FILE) {
                        exercisePs.setInt(9, exercise.getSampleSolution().getFile().getId());
                        exercisePs.setNull(10, Types.VARCHAR);
                    } else { // must be text
                        exercisePs.setNull(9, Types.INTEGER);
                        exercisePs.setString(10, exercise.getSampleSolution().getText());
                    }

                    // attachments
                    List<Integer> attachmentIds = exercise.getAttachments().stream().map(a -> a.getId())
                            .collect(Collectors.toList());

                    Array anhaengeIdsArray = con.createArrayOf("int4", attachmentIds.toArray());
                    exercisePs.setArray(11, anhaengeIdsArray);

                    exercisePs.setInt(12, exercise.getCourseId());

                    exercisePs.setString(13, exercise.getComment());

                    exercisePs.executeUpdate();

                    /* Set the generated ID so the calling function can read it. */
                    exercise.setId(exercisePs.getInt(1));

                    // create relation to tags
                    tagsPs.setInt(1, exercise.getId());

                    for (Tag t : exercise.getTags()) {
                        tagsPs.setInt(2, t.getId());
                        tagsPs.addBatch();
                    }

                    tagsPs.executeBatch();

                    con.commit();
                } catch (SQLException e) {
                    con.rollback();
                    throw e;
                } finally {
                    con.setAutoCommit(true);
                }
            }

            return null;
        }
    });
}

From source file: org.apache.hadoop.hive.metastore.MyXid.java

@Override
public void createTable(Table tbl) throws InvalidObjectException, MetaException, AlreadyExistsException {

    if (tbl == null) {
        throw new InvalidObjectException("unvalid parameters, tbl is null");
    }

    if (tbl.getTableType() == null) {
        tbl.setTableType("MANAGED_TABLE");
    }

    if (tbl.getTableType().equalsIgnoreCase("VIRTUAL_VIEW")) {
        jdbcCreateView(tbl);
        return;
    }

    tbl.setDbName(tbl.getDbName().toLowerCase());
    tbl.setTableName(tbl.getTableName().toLowerCase());

    LOG.debug("first, check the name is valid or not");
    if (!MetaStoreUtils.validateName(tbl.getTableName())
            || !MetaStoreUtils.validateColNames(tbl.getSd().getCols())
            || (tbl.getPriPartition() != null
                    && !MetaStoreUtils.validateName(tbl.getPriPartition().getParKey().getName()))
            || (tbl.getSubPartition() != null
                    && !MetaStoreUtils.validateName(tbl.getSubPartition().getParKey().getName()))) {
        throw new InvalidObjectException(tbl.getTableName() + " is not a valid object name");
    }

    long tblID = genTblID(tbl.getDbName(), tbl.getTableName());

    boolean success = false;

    Connection con;
    PreparedStatement ps = null;
    Statement stmt = null;
    Path tblPath = null;
    Warehouse wh = new Warehouse(hiveConf);
    boolean madeDir = false;

    LOG.debug("2, generate table path ");

    if (tbl.getSd().getLocation() == null || tbl.getSd().getLocation().isEmpty()) {
        tblPath = wh.getDefaultTablePath(tbl.getDbName(), tbl.getTableName());
    } else {
        if (tbl.getTableType().equalsIgnoreCase("EXTERNAL_TABLE")) {
            LOG.warn("Location: " + tbl.getSd().getLocation() + "specified for non-external table:"
                    + tbl.getTableName());
        }

        tblPath = wh.getDnsPath(new Path(tbl.getSd().getLocation()));
    }
    tbl.getSd().setLocation(tblPath.toString());

    try {
        con = getSegmentConnection(tbl.getDbName());
    } catch (MetaStoreConnectException e1) {
        LOG.error("create table error, db=" + tbl.getDbName() + ", table=" + tbl.getTableName() + ", msg="
                + e1.getMessage());
        throw new MetaException(e1.getMessage());
    } catch (SQLException e1) {
        LOG.error("create table error, db=" + tbl.getDbName() + ", table=" + tbl.getTableName() + ", msg="
                + e1.getMessage());
        throw new MetaException(e1.getMessage());
    }

    try {
        con.setAutoCommit(false);
        con.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);

        stmt = con.createStatement();

        LOG.debug("1 check the table is exist or not");
        String sql = "select tbl_id from tbls where db_name='" + tbl.getDbName().toLowerCase()
                + "' and tbl_name='" + tbl.getTableName().toLowerCase() + "'";

        boolean isTblFind = false;
        ResultSet checkTblSet = stmt.executeQuery(sql);

        while (checkTblSet.next()) {
            isTblFind = true;
            break;
        }
        checkTblSet.close();

        if (isTblFind) {
            throw new AlreadyExistsException(
                    "table " + tbl.getDbName() + ":" + tbl.getTableName() + " has exist");
        }

        LOG.debug("2 insert into tbls");

        ps = con.prepareStatement("INSERT INTO TBLS(tbl_id, is_compressed, retention, tbl_type, db_name, "
                + "tbl_name, tbl_owner, tbl_format"
                + ", pri_part_type, sub_part_type, pri_part_key, sub_part_key, input_format, output_format"
                + ", serde_name, serde_lib, tbl_location, tbl_comment)"
                + " values(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)");

        StorageDescriptor sd = tbl.getSd();
        if (sd == null || sd.getSerdeInfo() == null) {
            throw new MetaException("storage descriptor of table " + tbl.getTableName() + " is null");
        }

        SerDeInfo sdInfo = sd.getSerdeInfo();

        ps.setLong(1, tblID);
        ps.setBoolean(2, sd.isCompressed());
        ps.setLong(3, tbl.getRetention());
        if (tbl.getParameters() != null && tbl.getParameters().get("EXTERNAL") != null
                && tbl.getParameters().get("EXTERNAL").equalsIgnoreCase("TRUE")) {
            ps.setString(4, "EXTERNAL_TABLE");
        } else {
            ps.setString(4, tbl.getTableType());
        }
        ps.setString(5, tbl.getDbName());
        ps.setString(6, tbl.getTableName());
        ps.setString(7, tbl.getOwner());

        if (tbl.getParameters() == null) {
            ps.setString(8, null);
        } else {
            ps.setString(8, tbl.getParameters().get("type"));
        }

        Partition priPart = tbl.getPriPartition();
        Partition subPart = tbl.getSubPartition();
        if (priPart != null) {
            ps.setString(11, priPart.getParKey().getName());
            ps.setString(9, priPart.getParType());
        } else {
            ps.setString(11, null);
            ps.setString(9, null);
        }

        if (subPart != null) {
            ps.setString(12, subPart.getParKey().getName());
            ps.setString(10, subPart.getParType());
        } else {
            ps.setString(12, null);
            ps.setString(10, null);
        }

        ps.setString(13, sd.getInputFormat());
        ps.setString(14, sd.getOutputFormat());
        ps.setString(15, sdInfo.getName());
        ps.setString(16, sdInfo.getSerializationLib());
        ps.setString(17, sd.getLocation());

        if (tbl.getParameters() == null) {
            ps.setString(18, null);
        } else {
            ps.setString(18, tbl.getParameters().get("comment"));
        }

        ps.executeUpdate();

        ps.close();

        LOG.debug("3 insert into partitions");
        if (priPart != null) {
            ps = con.prepareStatement(
                    "INSERT INTO PARTITIONS(level, tbl_id," + "part_name, part_values) values(?,?,?,?)");

            Map<String, List<String>> partSpaceMap = priPart.getParSpaces();

            for (Map.Entry<String, List<String>> entry : partSpaceMap.entrySet()) {
                ps.setInt(1, 0);
                ps.setLong(2, tblID);
                ps.setString(3, entry.getKey());
                if (entry.getValue() != null) {
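                    // Bind the primary partition's value list as a SQL varchar array.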
                    Array spaceArray = con.createArrayOf("varchar", entry.getValue().toArray());
                    ps.setArray(4, spaceArray);
                } else {
                    ps.setArray(4, null);
                }

                ps.addBatch();
            }
            ps.executeBatch();
            ps.close();
        }

        if (subPart != null) {
            ps = con.prepareStatement(
                    "INSERT INTO PARTITIONS(level, tbl_id," + "part_name, part_values) values(?,?,?,?)");

            Map<String, List<String>> partSpaceMap = subPart.getParSpaces();

            for (Map.Entry<String, List<String>> entry : partSpaceMap.entrySet()) {
                ps.setInt(1, 1);
                ps.setLong(2, tblID);
                ps.setString(3, entry.getKey());

                if (entry.getValue() != null) {
                    Array spaceArray = con.createArrayOf("varchar", entry.getValue().toArray());
                    ps.setArray(4, spaceArray);
                } else {
                    ps.setArray(4, null);
                }

                ps.addBatch();
            }
            ps.executeBatch();
            ps.close();
        }

        LOG.debug("4 insert into columns");
        ps = con.prepareStatement("INSERT INTO COLUMNS(column_index, tbl_id, column_name, type_name, comment) "
                + " values(?,?,?,?,?)");

        List<FieldSchema> fieldList = sd.getCols();
        int fieldSize = fieldList.size();

        for (int i = 0; i < fieldSize; i++) {
            FieldSchema field = fieldList.get(i);
            ps.setInt(1, i);
            ps.setLong(2, tblID);
            ps.setString(3, field.getName().toLowerCase());
            ps.setString(4, field.getType());
            ps.setString(5, field.getComment());

            ps.addBatch();
        }

        ps.executeBatch();
        ps.close();

        LOG.debug("5  insert into parameters");

        boolean createExtDirIfNotExist = true;
        if (tbl.getParametersSize() > 0) {
            String createExtDirIfNotExistStr = tbl.getParameters().get("hive.exttable.createdir.ifnotexist");
            LOG.info("XXcreateExtDirIfNotExistStr=" + createExtDirIfNotExistStr);
            if (createExtDirIfNotExistStr != null && createExtDirIfNotExistStr.equalsIgnoreCase("false")) {
                createExtDirIfNotExist = false;
            }
            tbl.getParameters().remove("hive.exttable.createdir.ifnotexist");
        }

        if (tbl.getParametersSize() > 0 || sd.getParametersSize() > 0
                || sd.getSerdeInfo().getParametersSize() > 0 || sd.getNumBuckets() > -1) {
            ps = con.prepareStatement("insert into table_params(tbl_id, param_type, param_key, param_value) "
                    + " values(?,?,?,?)");
            if (tbl.getParametersSize() > 0) {

                for (Map.Entry<String, String> entry : tbl.getParameters().entrySet()) {
                    if (entry.getKey().equalsIgnoreCase("type") || entry.getKey().equalsIgnoreCase("comment"))
                        continue;
                    ps.setLong(1, tblID);
                    ps.setString(2, "TBL");
                    ps.setString(3, entry.getKey());
                    ps.setString(4, entry.getValue());

                    ps.addBatch();
                }
            }

            if (sd.getParametersSize() > 0) {
                for (Map.Entry<String, String> entry : sd.getParameters().entrySet()) {
                    ps.setLong(1, tblID);
                    ps.setString(2, "SD");
                    ps.setString(3, entry.getKey());
                    ps.setString(4, entry.getValue());

                    ps.addBatch();
                }
            }

            if (sd.getSerdeInfo().getParametersSize() > 0) {
                for (Map.Entry<String, String> entry : sd.getSerdeInfo().getParameters().entrySet()) {
                    ps.setLong(1, tblID);
                    ps.setString(2, "SERDE");
                    ps.setString(3, entry.getKey());
                    ps.setString(4, entry.getValue());

                    ps.addBatch();
                }
            }

            if (sd.getNumBuckets() > -1) {
                ps.setLong(1, tblID);
                ps.setString(2, "SD");
                ps.setString(3, "NUM_BUCKETS");
                ps.setString(4, String.valueOf(sd.getNumBuckets()));
                ps.addBatch();
            }

            ps.executeBatch();
            ps.close();
        }

        if (tbl.getSd().getBucketCols() != null && !tbl.getSd().getBucketCols().isEmpty()) {
            ps = con.prepareStatement(
                    "insert into bucket_cols(tbl_id, bucket_col_name, col_index) values(?,?,?)");
            int index = 0;
            for (String col : tbl.getSd().getBucketCols()) {
                ps.setLong(1, tblID);
                ps.setString(2, col.toLowerCase());
                ps.setInt(3, index);
                index++;
                ps.addBatch();
            }

            ps.executeBatch();
            ps.close();
        }

        if (tbl.getSd().getSortCols() != null && !tbl.getSd().getSortCols().isEmpty()) {
            ps = con.prepareStatement(
                    "insert into sort_cols(tbl_id, sort_column_name, sort_order, col_index) values(?,?,?,?)");
            int index = 0;
            for (Order o : tbl.getSd().getSortCols()) {
                ps.setLong(1, tblID);
                ps.setString(2, o.getCol());
                ps.setInt(3, o.getOrder());
                ps.setInt(4, index);
                index++;
                ps.addBatch();
            }

            ps.executeBatch();
            ps.close();
        }

        LOG.debug("make hdfs directory for table");

        if (createExtDirIfNotExist && tblPath != null) {
            if (!wh.isDir(tblPath)) {
                if (!wh.mkdirs(tblPath)) {
                    throw new MetaException(tblPath + " is not a directory or unable to create one");
                }
                madeDir = true;
            }

            if (tbl.getPriPartition() != null) {
                Set<String> priPartNames = tbl.getPriPartition().getParSpaces().keySet();

                Set<String> subPartNames = null;
                if (tbl.getSubPartition() != null) {
                    subPartNames = tbl.getSubPartition().getParSpaces().keySet();
                }

                List<Path> partPaths = Warehouse.getPartitionPaths(tblPath, priPartNames, subPartNames);

                for (Path partPath : partPaths) {
                    if (!wh.mkdirs(partPath)) {
                        throw new MetaException(
                                "Partition path " + partPath + " is not a directory or unable to create one.");
                    }
                }
            }
        }

        con.commit();

        success = true;
    } catch (SQLException sqlex) {
        LOG.error("create table error db=" + tbl.getDbName() + ", table=" + tbl.getTableName() + ",msg="
                + sqlex.getMessage());
        sqlex.printStackTrace();
        throw new MetaException(sqlex.getMessage());
    } finally {
        if (!success) {
            try {
                con.rollback();
            } catch (SQLException e) {
            }

            if (madeDir) {
                wh.deleteDir(tblPath, true);
            }
        }

        closeStatement(stmt);
        closeStatement(ps);
        closeConnection(con);
    }

    return;
}

From source file: org.apache.hadoop.hive.metastore.MyXid.java

public void addPartition(String dbName, String tblName, AddPartitionDesc addPartitionDesc)
        throws InvalidObjectException, MetaException {
    boolean success = false;

    Connection con = null;
    PreparedStatement ps = null;
    Statement stmt = null;
    dbName = dbName.toLowerCase();
    tblName = tblName.toLowerCase();

    boolean isPathMaked = false;
    ArrayList<Path> pathToMake = new ArrayList<Path>();
    Warehouse wh = new Warehouse(hiveConf);

    long tblID = 0;

    try {
        con = getSegmentConnection(dbName);
    } catch (MetaStoreConnectException e1) {
        LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                + addPartitionDesc.getLevel() + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    } catch (SQLException e1) {
        LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                + addPartitionDesc.getLevel() + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    }

    try {
        con.setAutoCommit(false);
        con.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ);
        stmt = con.createStatement();

        String tblType = null;
        boolean hasPriPart = false;
        boolean hasSubPart = false;
        String priPartKey = null;
        String subPartKey = null;
        String priPartType = null;
        String subPartType = null;

        String priKeyType = null;
        String subKeyType = null;
        ResultSet tblSet = null;
        boolean isTblFind = false;
        boolean isColFind = false;

        String tblFormat = null;
        String tblLocation = null;

        PrimitiveTypeInfo pti = null;
        ObjectInspector StringIO = null;
        ObjectInspector ValueIO = null;
        ObjectInspectorConverters.Converter converter1 = null;
        ObjectInspectorConverters.Converter converter2 = null;

        ArrayList<String> partToAdd = new ArrayList<String>();
        String sql = null;

        HiveConf hconf = (HiveConf) hiveConf;
        boolean externalPartition = hconf.getBoolVar(HiveConf.ConfVars.HIVESUPPORTEXTERNALPARTITION);

        if (addPartitionDesc.getLevel() == 0) {
            sql = "SELECT tbl_id, tbl_type, pri_part_type, pri_part_key, tbl_format, tbl_location"
                    + " from TBLS where db_name='" + dbName + "' and tbl_name='" + tblName + "'";

            tblSet = stmt.executeQuery(sql);
            isTblFind = false;

            while (tblSet.next()) {
                isTblFind = true;
                tblID = tblSet.getLong(1);
                tblType = tblSet.getString(2);
                priPartKey = tblSet.getString(4);
                priPartType = tblSet.getString(3);
                tblFormat = tblSet.getString(5);
                tblLocation = tblSet.getString(6);

                if (priPartType != null && !priPartType.isEmpty()) {
                    hasPriPart = true;
                }
                break;
            }
            tblSet.close();

            if (!isTblFind) {
                LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                        + addPartitionDesc.getLevel() + ", msg=" + "can not find table " + dbName + ":"
                        + tblName);

                throw new MetaException("can not find table " + dbName + ":" + tblName);
            }

            if (!tblType.equalsIgnoreCase("MANAGED_TABLE")) {
                if (tblType.equalsIgnoreCase("EXTERNAL_TABLE") && tblFormat != null
                        && tblFormat.equalsIgnoreCase("pgdata")) {
                    LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                            + addPartitionDesc.getLevel() + ", msg=" + tblType + ":" + tblFormat
                            + " can not support alter partition");
                    throw new MetaException(tblType + ":" + tblFormat + " can not support alter partition");
                }

                if (externalPartition && tblType.equalsIgnoreCase("EXTERNAL_TABLE")
                        && (tblFormat == null || !tblFormat.equalsIgnoreCase("pgdata"))) {
                } else {
                    LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                            + addPartitionDesc.getLevel() + ", msg=" + tblType
                            + " can not support alter partition");

                    throw new MetaException(tblType + " can not support alter partition");
                }
            }

            if (!hasPriPart) {
                LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                        + addPartitionDesc.getLevel() + ", msg=" + "table " + dbName + ":" + tblName
                        + " is not pri-partitioned");

                throw new MetaException("table " + dbName + ":" + tblName + " is not pri-partitioned");
            }

            sql = "SELECT type_name from COLUMNS where tbl_id=" + tblID + " and column_name='"
                    + priPartKey.toLowerCase() + "'";
            isColFind = false;
            ResultSet colSet = stmt.executeQuery(sql);
            while (colSet.next()) {
                isColFind = true;
                priKeyType = colSet.getString(1);
                break;
            }
            colSet.close();

            if (!isColFind) {
                LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                        + addPartitionDesc.getLevel() + ", msg=" + "table "
                        + "can not find partition key information " + priPartKey);

                throw new MetaException("can not find partition key information " + priPartKey);
            }

            pti = new PrimitiveTypeInfo();
            pti.setTypeName(priKeyType);
            StringIO = PrimitiveObjectInspectorFactory
                    .getPrimitiveJavaObjectInspector(PrimitiveCategory.STRING);
            ValueIO = PrimitiveObjectInspectorFactory
                    .getPrimitiveWritableObjectInspector(pti.getPrimitiveCategory());
            converter1 = ObjectInspectorConverters.getConverter(StringIO, ValueIO);
            converter2 = ObjectInspectorConverters.getConverter(StringIO, ValueIO);

            if ((addPartitionDesc.getPartType().equalsIgnoreCase("RANGE_PARTITION")
                    && !priPartType.equalsIgnoreCase("range"))
                    || (addPartitionDesc.getPartType().equalsIgnoreCase("LIST_PARTITION")
                            && !priPartType.equalsIgnoreCase("list"))) {
                LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                        + addPartitionDesc.getLevel() + ", msg=" + "can not add  a "
                        + addPartitionDesc.getPartType() + " partition, but the pri-partition type is "
                        + priPartType);

                throw new MetaException("can not add  a " + addPartitionDesc.getPartType()
                        + " partition, but the pri-partition type is " + priPartType);
            }

            LinkedHashMap<String, List<String>> partSpaces = new LinkedHashMap<String, List<String>>();
            Set<String> subPartNameSet = new TreeSet<String>();

            sql = "SELECT level, part_name, part_values from PARTITIONS where" + " tbl_id=" + tblID;// + " order by level asc";

            ResultSet partSet = stmt.executeQuery(sql);
            int partLevel = 0;

            while (partSet.next()) {
                partLevel = partSet.getInt(1);

                if (partLevel == 0) {
                    String partName = partSet.getString(2);
                    List<String> valueList = new ArrayList<String>();
                    Array spaceArray = partSet.getArray(3);

                    ResultSet priValueSet = spaceArray.getResultSet();

                    while (priValueSet.next()) {
                        valueList.add(priValueSet.getString(2));
                    }

                    partSpaces.put(partName, valueList);
                } else if (partLevel == 1) {
                    String partName = partSet.getString(2);
                    subPartNameSet.add(partName);
                }
            }
            partSet.close();

            partToAdd = new ArrayList<String>();

            LinkedHashMap<String, List<String>> addPartSpaces = (LinkedHashMap<String, List<String>>) addPartitionDesc
                    .getParSpaces();

            Iterator<String> itr = addPartSpaces.keySet().iterator();

            while (itr.hasNext()) {
                String key = itr.next().toLowerCase();
                if (partSpaces.containsKey(key)) {
                    LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                            + addPartitionDesc.getLevel() + ", msg=" + "table : " + tblName
                            + " have already contain a pri parititon named: " + key);

                    throw new MetaException(
                            "table : " + tblName + " have already contain a pri parititon named: " + key);
                }
                partToAdd.add(key);
            }

            Iterator<List<String>> listItr = addPartSpaces.values().iterator();

            while (listItr.hasNext()) {
                Iterator<String> valueItr = listItr.next().iterator();
                if (valueItr.hasNext()) {
                    String value = valueItr.next();

                    if (converter1.convert(value) == null) {
                        LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                                + addPartitionDesc.getLevel() + ", msg=" + "value : " + value
                                + " should be type of " + priKeyType);

                        throw new MetaException("value : " + value + " should be type of " + priKeyType);
                    }

                    Iterator<List<String>> PartValuesItr = partSpaces.values().iterator();
                    while (PartValuesItr.hasNext()) {
                        if (PartValuesItr.next().contains(value)) {
                            LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                                    + addPartitionDesc.getLevel() + ", msg=" + "table : " + tblName
                                    + " have already contain a pri partition contain value: " + value);

                            throw new MetaException("table : " + tblName
                                    + " have already contain a pri partition contain value: " + value);
                        }
                    }
                }
            }

            ps = con.prepareStatement(
                    "INSERT INTO partitions(level, tbl_id, " + " part_name, part_values) values(?,?,?,?)");

            for (Map.Entry<String, List<String>> entry : addPartSpaces.entrySet()) {
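                // level 0 marks a primary partition; its value list is bound as a varchar array.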
                ps.setInt(1, 0);
                ps.setLong(2, tblID);

                Array spaceArray = con.createArrayOf("varchar", entry.getValue().toArray());
                ps.setArray(4, spaceArray);
                ps.setString(3, entry.getKey());

                ps.addBatch();
            }
            ps.executeBatch();

            if (!tblType.equalsIgnoreCase("EXTERNAL_TABLE")) {
                for (String partName : partToAdd) {
                    if (tblLocation == null || tblLocation.trim().isEmpty()) {
                        pathToMake.addAll(wh.getPriPartitionPaths(dbName, tblName, partName, subPartNameSet));
                    } else {
                        pathToMake.addAll(Warehouse.getPriPartitionPaths(new Path(tblLocation), partName,
                                subPartNameSet));
                    }
                }
            } else {
                for (String partName : partToAdd) {
                    pathToMake.addAll(
                            Warehouse.getPriPartitionPaths(new Path(tblLocation), partName, subPartNameSet));
                }
            }
        } else if (addPartitionDesc.getLevel() == 1) {
            sql = "SELECT tbl_id, tbl_type, sub_part_type, sub_part_key, tbl_format, tbl_location"
                    + " from TBLS where db_name='" + dbName.toLowerCase() + "' and tbl_name='"
                    + tblName.toLowerCase() + "'";

            tblSet = stmt.executeQuery(sql);
            isTblFind = false;

            while (tblSet.next()) {
                isTblFind = true;
                tblID = tblSet.getLong(1);
                tblType = tblSet.getString(2);
                subPartKey = tblSet.getString(4);
                subPartType = tblSet.getString(3);
                tblFormat = tblSet.getString(5);
                tblLocation = tblSet.getString(6);

                if (subPartType != null && !subPartType.isEmpty()) {
                    hasSubPart = true;
                }

                break;
            }

            tblSet.close();
            if (!isTblFind) {
                LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                        + addPartitionDesc.getLevel() + ", msg=" + "can not find table " + dbName + ":"
                        + tblName);

                throw new MetaException("can not find table " + dbName + ":" + tblName);
            }

            if (!tblType.equalsIgnoreCase("MANAGED_TABLE")) {
                if (tblType.equalsIgnoreCase("EXTERNAL_TABLE") && tblFormat != null
                        && tblFormat.equalsIgnoreCase("pgdata")) {
                    LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                            + addPartitionDesc.getLevel() + ", msg=" + tblType + ":" + tblFormat
                            + " can not support alter partition");
                    throw new MetaException(tblType + ":" + tblFormat + " can not support alter partition");
                }

                if (externalPartition && tblType.equalsIgnoreCase("EXTERNAL_TABLE")
                        && (tblFormat == null || !tblFormat.equalsIgnoreCase("pgdata"))) {
                } else {
                    LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                            + addPartitionDesc.getLevel() + ", msg=" + tblType
                            + " can not support alter partition");

                    throw new MetaException(tblType + " can not support alter partition");
                }
            }

            if (!hasSubPart) {
                LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                        + addPartitionDesc.getLevel() + ", msg=" + "table " + dbName + ":" + tblName
                        + " is not sun-partitioned");

                throw new MetaException("table " + dbName + ":" + tblName + " is not sun-partitioned");
            }

            sql = "SELECT type_name from COLUMNS where tbl_id=" + tblID + " and column_name='"
                    + subPartKey.toLowerCase() + "'";

            isColFind = false;
            ResultSet colSet = stmt.executeQuery(sql);
            while (colSet.next()) {
                isColFind = true;
                subKeyType = colSet.getString(1);
                break;
            }

            colSet.close();

            if (!isColFind) {
                LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                        + addPartitionDesc.getLevel() + ", msg=" + "can not find partition key information "
                        + priPartKey);

                throw new MetaException("can not find partition key information " + priPartKey);
            }

            pti = new PrimitiveTypeInfo();
            pti.setTypeName(subKeyType);
            StringIO = PrimitiveObjectInspectorFactory
                    .getPrimitiveJavaObjectInspector(PrimitiveCategory.STRING);
            ValueIO = PrimitiveObjectInspectorFactory
                    .getPrimitiveWritableObjectInspector(pti.getPrimitiveCategory());
            converter1 = ObjectInspectorConverters.getConverter(StringIO, ValueIO);
            converter2 = ObjectInspectorConverters.getConverter(StringIO, ValueIO);

            if ((addPartitionDesc.getPartType().equalsIgnoreCase("RANGE_PARTITION")
                    && !subPartType.equalsIgnoreCase("range"))
                    || (addPartitionDesc.getPartType().equalsIgnoreCase("LIST_PARTITION")
                            && !subPartType.equalsIgnoreCase("list"))) {
                LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                        + addPartitionDesc.getLevel() + ", msg=" + "you can not add  a "
                        + addPartitionDesc.getPartType() + " partition, but the sub-partition type is "
                        + subPartType);

                throw new MetaException("you can not add  a " + addPartitionDesc.getPartType()
                        + " partition, but the sub-partition type is " + subPartType);
            }

            LinkedHashMap<String, List<String>> partSpaces = new LinkedHashMap<String, List<String>>();
            Set<String> partNameSet = new TreeSet<String>();

            sql = "SELECT level,  part_name, part_values from PARTITIONS where" + " tbl_id=" + tblID;// + " order by level asc";

            ResultSet partSet = stmt.executeQuery(sql);
            int partLevel = 0;

            while (partSet.next()) {
                partLevel = partSet.getInt(1);

                if (partLevel == 1) {
                    String partName = partSet.getString(2);
                    List<String> valueList = new ArrayList<String>();
                    Array spaceArray = partSet.getArray(3);

                    ResultSet priValueSet = spaceArray.getResultSet();

                    while (priValueSet.next()) {
                        valueList.add(priValueSet.getString(2));
                    }
                    partSpaces.put(partName, valueList);
                } else if (partLevel == 0) {
                    String partName = partSet.getString(2);
                    partNameSet.add(partName);
                }
            }

            partToAdd = new ArrayList<String>();

            LinkedHashMap<String, List<String>> addPartSpaces = (LinkedHashMap<String, List<String>>) addPartitionDesc
                    .getParSpaces();

            Iterator<String> itr = addPartSpaces.keySet().iterator();

            while (itr.hasNext()) {
                String key = itr.next().toLowerCase();
                if (partSpaces.containsKey(key)) {
                    LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                            + addPartitionDesc.getLevel() + ", msg=" + "table : " + tblName
                            + " have already contain a sub parititon named: " + key);

                    throw new MetaException(
                            "table : " + tblName + " have already contain a sub parititon named: " + key);
                }

                if (key.equalsIgnoreCase("default")) {
                    LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                            + addPartitionDesc.getLevel() + ", msg="
                            + "use : 'alter table tblname add default subpartition' to add default subpartition!");

                    throw new MetaException(
                            "use : 'alter table tblname add default subpartition' to add default subpartition!");
                }
                partToAdd.add(key);
            }

            Iterator<List<String>> listItr = addPartSpaces.values().iterator();

            while (listItr.hasNext()) {
                Iterator<String> valueItr = listItr.next().iterator();
                if (valueItr.hasNext()) {
                    String value = valueItr.next();

                    if (converter1.convert(value) == null) {
                        LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                                + addPartitionDesc.getLevel() + ", msg=" + "value : " + value
                                + " should be type of " + priKeyType);

                        throw new MetaException("value : " + value + " should be type of " + priKeyType);
                    }

                    Iterator<List<String>> PartValuesItr = partSpaces.values().iterator();
                    while (PartValuesItr.hasNext()) {
                        if (PartValuesItr.next().contains(value)) {
                            LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                                    + addPartitionDesc.getLevel() + ", msg=" + "table : " + tblName
                                    + " have already contain a sub partition contain value: " + value);

                            throw new MetaException("table : " + tblName
                                    + " have already contain a sub partition contain value: " + value);
                        }
                    }
                }
            }

            ps = con.prepareStatement(
                    "INSERT INTO partitions(level, tbl_id, " + " part_name, part_values) values(?,?,?,?)");

            for (Map.Entry<String, List<String>> entry : addPartSpaces.entrySet()) {
                ps.setInt(1, 1);
                ps.setLong(2, tblID);

                Array spaceArray = con.createArrayOf("varchar", entry.getValue().toArray());
                ps.setArray(4, spaceArray);
                ps.setString(3, entry.getKey());

                ps.addBatch();
            }
            ps.executeBatch();

            if (!tblType.equalsIgnoreCase("EXTERNAL_TABLE")) {
                for (String partName : partToAdd) {
                    if (tblLocation == null || tblLocation.trim().isEmpty()) {
                        pathToMake.addAll(wh.getSubPartitionPaths(dbName, tblName, partNameSet, partName));
                    } else {
                        pathToMake.addAll(
                                Warehouse.getSubPartitionPaths(new Path(tblLocation), partNameSet, partName));
                    }
                }
            } else {
                for (String partName : partToAdd) {
                    pathToMake.addAll(
                            Warehouse.getSubPartitionPaths(new Path(tblLocation), partNameSet, partName));
                }
            }
        }

        con.commit();
        success = true;
    } catch (SQLException ex) {
        ex.printStackTrace();
        LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                + addPartitionDesc.getLevel() + ", msg=" + ex.getMessage());

        throw new MetaException(ex.getMessage());
    } finally {
        if (!success) {
            try {
                con.rollback();
            } catch (SQLException e) {
            }

            if (isPathMaked) {
                for (Path path : pathToMake) {
                    wh.deleteDir(path, false);
                }
            }
        }

        closeStatement(ps);
        closeConnection(con);
    }

    if (success) {
        boolean mkDirOK = false;
        List<Path> createdPath = new ArrayList<Path>();
        try {
            for (Path path : pathToMake) {
                mkDirOK = wh.mkdirs(path);
                if (!mkDirOK) {
                    break;
                }

                createdPath.add(path);
            }
        } catch (Exception x) {
            mkDirOK = false;
        }

        if (!mkDirOK) {
            dropPartitionMeta(dbName, tblID, addPartitionDesc);
            if (!createdPath.isEmpty()) {
                for (Path path : createdPath) {
                    wh.deleteDir(path, true);
                }
            }

            throw new MetaException("can not create hdfs path, add partition failed");
        }

    }
}

From source file: org.apache.hadoop.hive.metastore.MyXid.java

@Override
public void addDefaultPartition(String dbName, String tblName, int level)
        throws InvalidObjectException, MetaException {
    boolean success = false;

    Connection con = null;
    Statement ps = null;
    PreparedStatement pss = null;

    dbName = dbName.toLowerCase();
    tblName = tblName.toLowerCase();

    boolean isPathMaked = false;
    ArrayList<Path> pathToMake = new ArrayList<Path>();
    Warehouse wh = new Warehouse(hiveConf);
    long tblID = 0;

    try {
        con = getSegmentConnection(dbName);
    } catch (MetaStoreConnectException e1) {
        LOG.error("add default partition error, db=" + dbName + ", tbl=" + tblName + ", level=" + level
                + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    } catch (SQLException e1) {
        LOG.error("add default partition error, db=" + dbName + ", tbl=" + tblName + ", level=" + level
                + ", msg=" + e1.getMessage());
        throw new MetaException(e1.getMessage());
    }

    try {
        con.setAutoCommit(false);
        con.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ);
        ps = con.createStatement();

        String tblType = null;
        boolean hasPriPart = false;
        boolean hasSubPart = false;
        String priPartKey = null;
        String subPartKey = null;
        String priPartType = null;
        String subPartType = null;

        String tblFormat = null;
        String tblLocation = null;

        String priKeyType = null;
        String subKeyType = null;
        ResultSet tblSet = null;
        boolean isTblFind = false;

        ArrayList<String> partToAdd = new ArrayList<String>();
        String sql = null;

        HiveConf hconf = (HiveConf) hiveConf;
        boolean externalPartition = hconf.getBoolVar(HiveConf.ConfVars.HIVESUPPORTEXTERNALPARTITION);

        if (level == 0) {
            sql = "SELECT tbl_id, tbl_type, pri_part_type, pri_part_key, tbl_format, tbl_location"
                    + " from TBLS where db_name='" + dbName + "' and tbl_name='" + tblName + "'";

            tblSet = ps.executeQuery(sql);
            isTblFind = false;

            while (tblSet.next()) {
                isTblFind = true;
                tblID = tblSet.getLong(1);
                tblType = tblSet.getString(2);
                priPartKey = tblSet.getString(4);
                priPartType = tblSet.getString(3);
                tblFormat = tblSet.getString(5);
                tblLocation = tblSet.getString(6);

                if (priPartType != null && !priPartType.isEmpty()) {
                    hasPriPart = true;
                }
                break;
            }

            tblSet.close();

            if (!isTblFind) {
                LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level=" + level
                        + ", msg=" + "can not find table " + dbName + ":" + tblName);

                throw new MetaException("can not find table " + dbName + ":" + tblName);
            }

            if (!tblType.equalsIgnoreCase("MANAGED_TABLE")) {
                if (tblType.equalsIgnoreCase("EXTERNAL_TABLE") && tblFormat != null
                        && tblFormat.equalsIgnoreCase("pgdata")) {
                    LOG.error("add default partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                            + level + ", msg=" + tblType + ":" + tblFormat
                            + " can not support alter partition");
                    throw new MetaException(tblType + ":" + tblFormat + " can not support alter partition");
                }

                if (externalPartition && tblType.equalsIgnoreCase("EXTERNAL_TABLE")
                        && (tblFormat == null || !tblFormat.equalsIgnoreCase("pgdata"))) {
                } else {
                    LOG.error("add default partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                            + level + ", msg=" + tblType + " can not support alter partition");

                    throw new MetaException(tblType + " can not support alter partition");
                }
            }

            if (!hasPriPart) {
                LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level=" + level
                        + ", msg=" + "table " + dbName + ":" + tblName + " is not pri-partitioned");

                throw new MetaException("table " + dbName + ":" + tblName + " is not pri-partitioned");
            }

            List<String> partNames = new ArrayList<String>();
            Set<String> subPartNameSet = new TreeSet<String>();

            sql = "SELECT level, part_name from PARTITIONS where" + " tbl_id=" + tblID + " order by level asc";

            ResultSet partSet = ps.executeQuery(sql);
            int partLevel = 0;

            while (partSet.next()) {
                partLevel = partSet.getInt(1);

                if (partLevel == 0) {
                    String partName = partSet.getString(2);
                    partNames.add(partName);
                } else if (partLevel == 1) {
                    String partName = partSet.getString(2);
                    subPartNameSet.add(partName);
                }
            }
            partSet.close();

            if (partNames.contains("default")) {
                LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level=" + level
                        + ", msg=" + "table : " + tblName
                        + " have already contain a pri parititon named: default");

                throw new MetaException(
                        "table : " + tblName + " have already contain a pri parititon named: default");
            }

            pss = con.prepareStatement(
                    "INSERT INTO partitions(level, tbl_id, " + " part_name, part_values) values(?,?,?,?)");

            pss.setInt(1, 0);

            pss.setLong(2, tblID);
            pss.setString(3, "default");

            // The "default" partition is stored with an empty value list.
            Array spaceArray = con.createArrayOf("varchar", new ArrayList<String>().toArray());
            pss.setArray(4, spaceArray);

            pss.executeUpdate();

            if (!tblType.equalsIgnoreCase("EXTERNAL_TABLE")) {
                if (tblLocation == null || tblLocation.trim().isEmpty()) {
                    pathToMake.addAll(wh.getPriPartitionPaths(dbName, tblName, "default", subPartNameSet));
                } else {
                    pathToMake.addAll(
                            Warehouse.getPriPartitionPaths(new Path(tblLocation), "default", subPartNameSet));
                }
            } else {
                pathToMake.addAll(
                        Warehouse.getPriPartitionPaths(new Path(tblLocation), "default", subPartNameSet));
            }
        } else if (level == 1) {
            sql = "SELECT tbl_id, tbl_type, sub_part_type, sub_part_key, tbl_format, tbl_location"
                    + " from TBLS where db_name='" + dbName.toLowerCase() + "' and tbl_name='"
                    + tblName.toLowerCase() + "'";

            tblSet = ps.executeQuery(sql);
            isTblFind = false;

            while (tblSet.next()) {
                isTblFind = true;
                tblID = tblSet.getLong(1);
                tblType = tblSet.getString(2);
                subPartKey = tblSet.getString(4);
                subPartType = tblSet.getString(3);
                tblFormat = tblSet.getString(5);
                tblLocation = tblSet.getString(6);

                if (subPartType != null && !subPartType.isEmpty()) {
                    hasSubPart = true;
                }

                break;
            }

            tblSet.close();

            if (!isTblFind) {
                LOG.error("add default partition error, db=" + dbName + ", tbl=" + tblName + ", level=" + level
                        + ", msg=" + "can not find table " + dbName + ":" + tblName);

                throw new MetaException("can not find table " + dbName + ":" + tblName);
            }

            if (!tblType.equalsIgnoreCase("MANAGED_TABLE")) {
                if (tblType.equalsIgnoreCase("EXTERNAL_TABLE") && tblFormat != null
                        && tblFormat.equalsIgnoreCase("pgdata")) {
                    LOG.error("add default partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                            + level + ", msg=" + tblType + ":" + tblFormat
                            + " can not support alter partition");
                    throw new MetaException(tblType + ":" + tblFormat + " can not support alter partition");
                }
                if (!(externalPartition && tblType.equalsIgnoreCase("EXTERNAL_TABLE")
                        && (tblFormat == null || !tblFormat.equalsIgnoreCase("pgdata")))) {
                    LOG.error("add default partition error, db=" + dbName + ", tbl=" + tblName + ", level="
                            + level + ", msg=" + tblType + " can not support alter partition");

                    throw new MetaException(tblType + " can not support alter partition");
                }
            }

            if (!hasSubPart) {
                LOG.error("add default partition error, db=" + dbName + ", tbl=" + tblName + ", level=" + level
                        + ", msg=" + "table " + dbName + ":" + tblName + " is not sub-partitioned");

                throw new MetaException("table " + dbName + ":" + tblName + " is not sub-partitioned");
            }

            List<String> partNames = new ArrayList<String>();
            Set<String> partNameSet = new TreeSet<String>();

            sql = "SELECT level,  part_name from PARTITIONS where" + " tbl_id=" + tblID + " order by level asc";
            ResultSet partSet = ps.executeQuery(sql);
            int partLevel = 0;

            while (partSet.next()) {
                partLevel = partSet.getInt(1);

                if (partLevel == 1) {
                    String partName = partSet.getString(2);
                    partNames.add(partName);
                } else if (partLevel == 0) {
                    String partName = partSet.getString(2);
                    partNameSet.add(partName);
                }
            }

            partSet.close();

            if (partNames.contains("default")) {
                LOG.error("add default partition error, db=" + dbName + ", tbl=" + tblName + ", level=" + level
                        + ", msg=" + "table : " + tblName
                        + " have already contain a sub parititon named: default");

                throw new MetaException(
                        "table : " + tblName + " have already contain a sub parititon named: default");
            }

            pss = con.prepareStatement(
                    "INSERT INTO partitions(level, tbl_id, " + " part_name, part_values) values(?,?,?,?)");

            pss.setInt(1, 1);
            pss.setLong(2, tblID);
            pss.setString(3, "default");

            Array spaceArray = con.createArrayOf("varchar", new ArrayList<String>().toArray());
            pss.setArray(4, spaceArray);

            pss.executeUpdate();

            if (!tblType.equalsIgnoreCase("EXTERNAL_TABLE")) {
                if (tblLocation == null || tblLocation.trim().isEmpty()) {
                    pathToMake.addAll(wh.getSubPartitionPaths(dbName, tblName, partNameSet, "default"));
                } else {
                    pathToMake.addAll(
                            Warehouse.getSubPartitionPaths(new Path(tblLocation), partNameSet, "default"));
                }
            } else {
                pathToMake
                        .addAll(Warehouse.getSubPartitionPaths(new Path(tblLocation), partNameSet, "default"));
            }
        }

        con.commit();
        success = true;
    } catch (SQLException ex) {
        LOG.error("add partition error, db=" + dbName + ", tbl=" + tblName + ", level=" + level + ", msg="
                + ex.getMessage());

        throw new MetaException(ex.getMessage());
    } finally {
        if (!success) {
            try {
                con.rollback();
            } catch (SQLException e) {
                // ignore rollback failure; the original error is already being propagated as a MetaException
            }

            if (isPathMaked) {
                for (Path path : pathToMake) {
                    wh.deleteDir(path, false);
                }
            }
        }

        closeStatement(ps);
        closeStatement(pss);
        closeConnection(con);
    }

    if (success) {
        boolean mkDirOK = false;
        List<Path> createdPath = new ArrayList<Path>();
        try {
            for (Path path : pathToMake) {
                mkDirOK = wh.mkdirs(path);
                if (!mkDirOK) {
                    break;
                }

                createdPath.add(path);
            }
        } catch (Exception x) {
            mkDirOK = false;
        }

        if (!mkDirOK) {
            dropPartitionMeta(dbName, tblID, "default", level);
            if (!createdPath.isEmpty()) {
                for (Path path : createdPath) {
                    wh.deleteDir(path, true);
                }
            }
        }

    }

}
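
The metastore code above binds the default partition's empty value list with createArrayOf("varchar", ...). Below is a minimal, self-contained sketch of that pattern; the partitions table and its columns are taken from the snippet, the helper name insertPartitionRow is hypothetical, and the "varchar" element type assumes a driver (such as PostgreSQL's) that accepts it.

import java.sql.Array;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

public class PartitionRowWriter {

    /**
     * Inserts one partition row, binding the (possibly empty) value list as a
     * SQL varchar array, mirroring the snippet above.
     */
    public static void insertPartitionRow(Connection con, int level, long tblId,
            String partName, List<String> partValues) throws SQLException {
        String sql = "INSERT INTO partitions(level, tbl_id, part_name, part_values) values(?,?,?,?)";
        try (PreparedStatement pss = con.prepareStatement(sql)) {
            pss.setInt(1, level);
            pss.setLong(2, tblId);
            pss.setString(3, partName);
            // createArrayOf accepts an empty element array; "varchar" must be a
            // type name the driver understands.
            Array values = con.createArrayOf("varchar", partValues.toArray());
            pss.setArray(4, values);
            pss.executeUpdate();
            values.free(); // release driver-held resources for the Array
        }
    }
}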

From source file:org.apache.phoenix.schema.stats.BaseStatsCollectorIT.java

@Test
public void testUpdateStats() throws Exception {
    Connection conn;
    PreparedStatement stmt;
    ResultSet rs;
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    conn = getConnection();
    conn.createStatement().execute("CREATE TABLE " + fullTableName
            + " ( k VARCHAR, a_string_array VARCHAR(100) ARRAY[4], b_string_array VARCHAR(100) ARRAY[4] \n"
            + " CONSTRAINT pk PRIMARY KEY (k, b_string_array DESC))" + tableDDLOptions);
    String[] s;
    Array array;
    conn = upsertValues(props, fullTableName);
    collectStatistics(conn, fullTableName);
    rs = conn.createStatement().executeQuery("EXPLAIN SELECT k FROM " + fullTableName);
    rs.next();
    long rows1 = (Long) rs.getObject(PhoenixRuntime.EXPLAIN_PLAN_ESTIMATED_ROWS_READ_COLUMN);
    stmt = upsertStmt(conn, fullTableName);
    stmt.setString(1, "z");
    s = new String[] { "xyz", "def", "ghi", "jkll", null, null, "xxx" };
    array = conn.createArrayOf("VARCHAR", s);
    stmt.setArray(2, array);
    s = new String[] { "zya", "def", "ghi", "jkll", null, null, null, "xxx" };
    array = conn.createArrayOf("VARCHAR", s);
    stmt.setArray(3, array);
    stmt.execute();
    conn.commit();
    collectStatistics(conn, fullTableName);
    rs = conn.createStatement().executeQuery("EXPLAIN SELECT k FROM " + fullTableName);
    rs.next();
    long rows2 = (Long) rs.getObject(PhoenixRuntime.EXPLAIN_PLAN_ESTIMATED_ROWS_READ_COLUMN);
    assertNotEquals(rows1, rows2);
    conn.close();
}
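
The test above passes the SQL type name "VARCHAR" and a java.lang.String[] (including null elements) to createArrayOf before binding the result with setArray. The condensed sketch below shows just that upsert pattern in isolation; the JDBC URL, table name DEMO_TABLE, and DDL are illustrative assumptions, not part of the test.

import java.sql.Array;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class PhoenixArrayUpsertSketch {
    public static void main(String[] args) throws SQLException {
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost")) {
            conn.createStatement().execute("CREATE TABLE IF NOT EXISTS DEMO_TABLE ("
                    + " k VARCHAR PRIMARY KEY, vals VARCHAR(100) ARRAY[4])");
            try (PreparedStatement stmt =
                    conn.prepareStatement("UPSERT INTO DEMO_TABLE VALUES (?, ?)")) {
                // SQL type name plus Object[] elements, as in the test above.
                Array array = conn.createArrayOf("VARCHAR", new String[] { "abc", "def", null });
                stmt.setString(1, "row1");
                stmt.setArray(2, array);
                stmt.execute();
            }
            conn.commit(); // Phoenix connections do not auto-commit by default
        }
    }
}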

From source file:org.apache.phoenix.schema.stats.BaseStatsCollectorIT.java

private Connection upsertValues(Properties props, String tableName)
        throws SQLException, IOException, InterruptedException {
    Connection conn;
    PreparedStatement stmt;
    conn = getConnection();
    stmt = upsertStmt(conn, tableName);
    stmt.setString(1, "a");
    String[] s = new String[] { "abc", "def", "ghi", "jkll", null, null, "xxx" };
    Array array = conn.createArrayOf("VARCHAR", s);
    stmt.setArray(2, array);
    s = new String[] { "abc", "def", "ghi", "jkll", null, null, null, "xxx" };
    array = conn.createArrayOf("VARCHAR", s);
    stmt.setArray(3, array);
    stmt.execute();
    conn.commit();
    stmt = upsertStmt(conn, tableName);
    stmt.setString(1, "b");
    s = new String[] { "xyz", "def", "ghi", "jkll", null, null, "xxx" };
    array = conn.createArrayOf("VARCHAR", s);
    stmt.setArray(2, array);
    s = new String[] { "zya", "def", "ghi", "jkll", null, null, null, "xxx" };
    array = conn.createArrayOf("VARCHAR", s);
    stmt.setArray(3, array);
    stmt.execute();
    conn.commit();
    stmt = upsertStmt(conn, tableName);
    stmt.setString(1, "c");
    s = new String[] { "xyz", "def", "ghi", "jkll", null, null, "xxx" };
    array = conn.createArrayOf("VARCHAR", s);
    stmt.setArray(2, array);
    s = new String[] { "zya", "def", "ghi", "jkll", null, null, null, "xxx" };
    array = conn.createArrayOf("VARCHAR", s);
    stmt.setArray(3, array);
    stmt.execute();
    conn.commit();
    stmt = upsertStmt(conn, tableName);
    stmt.setString(1, "d");
    s = new String[] { "xyz", "def", "ghi", "jkll", null, null, "xxx" };
    array = conn.createArrayOf("VARCHAR", s);
    stmt.setArray(2, array);
    s = new String[] { "zya", "def", "ghi", "jkll", null, null, null, "xxx" };
    array = conn.createArrayOf("VARCHAR", s);
    stmt.setArray(3, array);
    stmt.execute();
    conn.commit();
    stmt = upsertStmt(conn, tableName);
    stmt.setString(1, "b");
    s = new String[] { "xyz", "def", "ghi", "jkll", null, null, "xxx" };
    array = conn.createArrayOf("VARCHAR", s);
    stmt.setArray(2, array);
    s = new String[] { "zya", "def", "ghi", "jkll", null, null, null, "xxx" };
    array = conn.createArrayOf("VARCHAR", s);
    stmt.setArray(3, array);
    stmt.execute();
    conn.commit();
    stmt = upsertStmt(conn, tableName);
    stmt.setString(1, "e");
    s = new String[] { "xyz", "def", "ghi", "jkll", null, null, "xxx" };
    array = conn.createArrayOf("VARCHAR", s);
    stmt.setArray(2, array);
    s = new String[] { "zya", "def", "ghi", "jkll", null, null, null, "xxx" };
    array = conn.createArrayOf("VARCHAR", s);
    stmt.setArray(3, array);
    stmt.execute();
    conn.commit();
    return conn;
}
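
Each repeated block in upsertValues binds a key, two VARCHAR arrays, executes, and commits. A minimal sketch of how that could be factored into a helper is below; the class and method names are hypothetical, and it assumes the same three-parameter upsert statement produced by upsertStmt.

import java.sql.Array;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

final class ArrayUpsertHelper {

    private ArrayUpsertHelper() {
    }

    // Binds the key plus two VARCHAR arrays, then executes and commits,
    // exactly as each repeated block in upsertValues does.
    static void upsertRow(Connection conn, PreparedStatement stmt, String key,
            String[] firstArray, String[] secondArray) throws SQLException {
        stmt.setString(1, key);
        stmt.setArray(2, conn.createArrayOf("VARCHAR", firstArray));
        stmt.setArray(3, conn.createArrayOf("VARCHAR", secondArray));
        stmt.execute();
        conn.commit();
    }
}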