Example usage for org.apache.hadoop.io IOUtils closeStream

List of usage examples for org.apache.hadoop.io IOUtils closeStream

Introduction

On this page you can find example usages of org.apache.hadoop.io IOUtils closeStream.

Prototype

public static void closeStream(java.io.Closeable stream) 

Source Link

Document

Closes the stream, ignoring any Throwable.
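
Before the project examples below, here is a minimal sketch of the usual pattern: open Hadoop streams, copy bytes, and close them in a finally block with closeStream so that cleanup itself never throws. The file names and paths are purely illustrative, not taken from the examples on this page.

// Minimal illustrative sketch: copy a local file into a Hadoop FileSystem,
// closing both streams in finally via IOUtils.closeStream.
import java.io.FileInputStream;
import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class CloseStreamExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);

        InputStream in = null;
        FSDataOutputStream out = null;
        try {
            in = new FileInputStream("local-input.txt");      // illustrative local file
            out = fs.create(new Path("/tmp/example-copy"));   // illustrative destination
            IOUtils.copyBytes(in, out, 4096, false);          // copy without auto-closing
        } finally {
            // closeStream(null) is a no-op; any Throwable thrown on close is ignored
            IOUtils.closeStream(in);
            IOUtils.closeStream(out);
        }
    }
}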

Usage

From source file:org.apache.hive.common.util.StreamPrinter.java

License:Apache License

@Override
public void run() {
    BufferedReader br = null;
    try {
        InputStreamReader isr = new InputStreamReader(is);
        br = new BufferedReader(isr);
        String line = null;
        if (type != null) {
            while ((line = br.readLine()) != null) {
                for (PrintStream os : outputStreams) {
                    os.println(type + ">" + line);
                }
            }
        } else {
            while ((line = br.readLine()) != null) {
                for (PrintStream os : outputStreams) {
                    os.println(line);
                }
            }
        }
        br.close();
        br = null;
    } catch (IOException ioe) {
        ioe.printStackTrace();
    } finally {
        IOUtils.closeStream(br);
    }
}

From source file:org.apache.hoya.core.persist.JsonSerDeser.java

License:Apache License

/**
 * Convert from a JSON resource on the classpath
 * @param resource resource path of the input JSON
 * @return the parsed JSON
 * @throws IOException IO problems
 * @throws JsonMappingException failure to map from the JSON to this class
 */
public T fromResource(String resource) throws IOException, JsonParseException, JsonMappingException {
    InputStream resStream = null;
    try {
        resStream = this.getClass().getResourceAsStream(resource);
        if (resStream == null) {
            throw new FileNotFoundException(resource);
        }

        return (T) (mapper.readValue(resStream, classType));
    } catch (IOException e) {
        log.error("Exception while parsing json resource {}: {}", resource, e);
        throw e;
    } finally {
        IOUtils.closeStream(resStream);
    }
}

From source file:org.apache.hoya.tools.ConfigHelper.java

License:Apache License

/**
 * Save a configuration as XML to a path on the given filesystem
 * @param fs filesystem
 * @param destPath dest to save
 * @param confToSave  config to save
 * @throws IOException IO problems
 */
public static void saveConfig(FileSystem fs, Path destPath, Configuration confToSave) throws IOException {
    FSDataOutputStream fos = fs.create(destPath);
    try {
        confToSave.writeXml(fos);
    } finally {
        IOUtils.closeStream(fos);
    }
}

From source file:org.apache.hoya.tools.ConfigHelper.java

License:Apache License

/**
 * Generate a config file in a destination directory on the local filesystem
 * @param generatingConf the configuration to write out
 * @param confdir the directory path where the file is to go
 * @param filename the filename
 * @return the destination path
 * @throws IOException IO problems
 */
public static File saveConfig(Configuration generatingConf, File confdir, String filename) throws IOException {

    File destPath = new File(confdir, filename);
    OutputStream fos = new FileOutputStream(destPath);
    try {
        generatingConf.writeXml(fos);
    } finally {
        IOUtils.closeStream(fos);
    }
    return destPath;
}

From source file:org.apache.hoya.tools.CoreFileSystem.java

License:Apache License

/**
 * Verify that a user has write access to a directory.
 * It does this by creating and then deleting a temporary file.
 *
 * @param dirPath actual directory to look for
 * @throws FileNotFoundException file not found
 * @throws IOException trouble with the filesystem
 * @throws BadClusterStateException if the directory is not writable
 */
public void verifyDirectoryWriteAccess(Path dirPath) throws IOException, HoyaException {
    verifyPathExists(dirPath);
    Path tempFile = new Path(dirPath, "tmp-file-for-checks");
    try {
        FSDataOutputStream out = null;
        out = fileSystem.create(tempFile, true);
        IOUtils.closeStream(out);
        fileSystem.delete(tempFile, false);
    } catch (IOException e) {
        log.warn("Failed to create file {}: {}", tempFile, e);
        throw new BadClusterStateException(e, "Unable to write to directory %s : %s", dirPath, e.toString());
    }
}

From source file:org.apache.james.mailbox.hbase.HBaseClusterSingleton.java

License:Apache License

/**
 * Delete all rows from the specified table.
 *
 * @param tableName the name of the table to clear
 */
public void clearTable(String tableName) {
    HTable table = null;
    ResultScanner scanner = null;
    try {
        table = new HTable(conf, tableName);
        Scan scan = new Scan();
        scan.setCaching(1000);
        scanner = table.getScanner(scan);
        Result result;
        while ((result = scanner.next()) != null) {
            Delete delete = new Delete(result.getRow());
            table.delete(delete);
        }
    } catch (IOException ex) {
        LOG.info("Exception clearing table {}", tableName);
    } finally {
        IOUtils.closeStream(scanner);
        IOUtils.closeStream(table);
    }
}

From source file:org.apache.kylin.cube.cli.DictionaryGeneratorCLI.java

License:Apache License

private static void processSegment(KylinConfig config, CubeSegment cubeSeg, String uuid,
        DistinctColumnValuesProvider factTableValueProvider, DictionaryProvider dictProvider)
        throws IOException {
    CubeManager cubeMgr = CubeManager.getInstance(config);

    // dictionary
    for (TblColRef col : cubeSeg.getCubeDesc().getAllColumnsNeedDictionaryBuilt()) {
        logger.info("Building dictionary for " + col);
        IReadableTable inpTable = factTableValueProvider.getDistinctValuesFor(col);

        Dictionary<String> preBuiltDict = null;
        if (dictProvider != null) {
            preBuiltDict = dictProvider.getDictionary(col);
        }

        if (preBuiltDict != null) {
            logger.debug("Dict for '" + col.getName() + "' has already been built, save it");
            cubeMgr.saveDictionary(cubeSeg, col, inpTable, preBuiltDict);
        } else {
            logger.debug(
                    "Dict for '" + col.getName() + "' not pre-built, build it from " + inpTable.toString());
            cubeMgr.buildDictionary(cubeSeg, col, inpTable);
        }
    }

    // snapshot
    Set<String> toSnapshot = Sets.newHashSet();
    Set<TableRef> toCheckLookup = Sets.newHashSet();
    for (DimensionDesc dim : cubeSeg.getCubeDesc().getDimensions()) {
        TableRef table = dim.getTableRef();
        if (cubeSeg.getModel().isLookupTable(table)) {
            // only the snapshot desc is not ext type, need to take snapshot
            if (!cubeSeg.getCubeDesc().isExtSnapshotTable(table.getTableIdentity())) {
                toSnapshot.add(table.getTableIdentity());
                toCheckLookup.add(table);
            }
        }
    }

    for (String tableIdentity : toSnapshot) {
        logger.info("Building snapshot of " + tableIdentity);
        cubeMgr.buildSnapshotTable(cubeSeg, tableIdentity, uuid);
    }

    CubeInstance updatedCube = cubeMgr.getCube(cubeSeg.getCubeInstance().getName());
    cubeSeg = updatedCube.getSegmentById(cubeSeg.getUuid());
    for (TableRef lookup : toCheckLookup) {
        logger.info("Checking snapshot of " + lookup);
        try {
            JoinDesc join = cubeSeg.getModel().getJoinsTree().getJoinByPKSide(lookup);
            ILookupTable table = cubeMgr.getLookupTable(cubeSeg, join);
            if (table != null) {
                IOUtils.closeStream(table);
            }
        } catch (Throwable th) {
            throw new RuntimeException("Checking snapshot of " + lookup + " failed.", th);
        }
    }
}

From source file:org.apache.kylin.engine.mr.common.CubeStatsReader.java

License:Apache License

public CubeStatsReader(CubeSegment cubeSegment, KylinConfig kylinConfig) throws IOException {
    ResourceStore store = ResourceStore.getStore(kylinConfig);
    cuboidScheduler = new CuboidScheduler(cubeSegment.getCubeDesc());
    String statsKey = cubeSegment.getStatisticsResourcePath();
    File tmpSeqFile = writeTmpSeqFile(store.getResource(statsKey).inputStream);
    Reader reader = null;

    try {
        Configuration hadoopConf = HadoopUtil.getCurrentConfiguration();

        Path path = new Path(HadoopUtil.fixWindowsPath("file://" + tmpSeqFile.getAbsolutePath()));
        Option seqInput = SequenceFile.Reader.file(path);
        reader = new SequenceFile.Reader(hadoopConf, seqInput);

        int percentage = 100;
        int mapperNumber = 0;
        double mapperOverlapRatio = 0;
        Map<Long, HLLCounter> counterMap = Maps.newHashMap();

        LongWritable key = (LongWritable) ReflectionUtils.newInstance(reader.getKeyClass(), hadoopConf);
        BytesWritable value = (BytesWritable) ReflectionUtils.newInstance(reader.getValueClass(), hadoopConf);
        while (reader.next(key, value)) {
            if (key.get() == 0L) {
                percentage = Bytes.toInt(value.getBytes());
            } else if (key.get() == -1) {
                mapperOverlapRatio = Bytes.toDouble(value.getBytes());
            } else if (key.get() == -2) {
                mapperNumber = Bytes.toInt(value.getBytes());
            } else if (key.get() > 0) {
                HLLCounter hll = new HLLCounter(kylinConfig.getCubeStatsHLLPrecision());
                ByteArray byteArray = new ByteArray(value.getBytes());
                hll.readRegisters(byteArray.asBuffer());
                counterMap.put(key.get(), hll);
            }
        }

        this.seg = cubeSegment;
        this.samplingPercentage = percentage;
        this.mapperNumberOfFirstBuild = mapperNumber;
        this.mapperOverlapRatioOfFirstBuild = mapperOverlapRatio;
        this.cuboidRowEstimatesHLL = counterMap;

    } finally {
        IOUtils.closeStream(reader);
        tmpSeqFile.delete();
    }
}

From source file:org.apache.kylin.engine.mr.common.CubeStatsReader.java

License:Apache License

private File writeTmpSeqFile(InputStream inputStream) throws IOException {
    File tempFile = File.createTempFile("kylin_stats_tmp", ".seq");
    FileOutputStream out = null;
    try {
        out = new FileOutputStream(tempFile);
        org.apache.commons.io.IOUtils.copy(inputStream, out);
    } finally {
        IOUtils.closeStream(inputStream);
        IOUtils.closeStream(out);
    }
    return tempFile;
}

From source file:org.apache.kylin.engine.mr.steps.MergeDictionaryMapper.java

License:Apache License

@Override
protected void doMap(IntWritable key, NullWritable value, Context context)
        throws IOException, InterruptedException {

    int index = key.get();

    if (index < tblColRefs.length) {
        // merge dictionary
        TblColRef col = tblColRefs[index];
        List<DictionaryInfo> dictInfos = Lists.newArrayList();
        for (CubeSegment segment : mergingSegments) {
            if (segment.getDictResPath(col) != null) {
                DictionaryInfo dictInfo = dictMgr.getDictionaryInfo(segment.getDictResPath(col));
                if (dictInfo != null && !dictInfos.contains(dictInfo)) {
                    dictInfos.add(dictInfo);
                }
            }
        }

        DictionaryInfo mergedDictInfo = dictMgr.mergeDictionary(dictInfos);
        String tblCol = col.getTableAlias() + ":" + col.getName();
        String dictInfoPath = mergedDictInfo == null ? "" : mergedDictInfo.getResourcePath();

        context.write(new IntWritable(-1), new Text(tblCol + "=" + dictInfoPath));

    } else {
        // merge statistics
        KylinConfig kylinConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(
                new SerializableConfiguration(context.getConfiguration()),
                context.getConfiguration().get(BatchConstants.ARG_META_URL));

        final String cubeName = context.getConfiguration().get(BatchConstants.ARG_CUBE_NAME);
        final String segmentId = context.getConfiguration().get(BatchConstants.ARG_SEGMENT_ID);
        final String statOutputPath = context.getConfiguration()
                .get(MergeDictionaryJob.OPTION_OUTPUT_PATH_STAT.getOpt());
        CubeInstance cubeInstance = CubeManager.getInstance(kylinConfig).getCube(cubeName);

        logger.info("Statistics output path: {}", statOutputPath);

        CubeSegment newSegment = cubeInstance.getSegmentById(segmentId);
        ResourceStore rs = ResourceStore.getStore(kylinConfig);

        Map<Long, HLLCounter> cuboidHLLMap = Maps.newHashMap();
        Configuration conf = null;
        int averageSamplingPercentage = 0;

        for (CubeSegment cubeSegment : mergingSegments) {
            String filePath = cubeSegment.getStatisticsResourcePath();
            InputStream is = rs.getResource(filePath).inputStream;
            File tempFile;
            FileOutputStream tempFileStream = null;

            try {
                tempFile = File.createTempFile(segmentId, ".seq");
                tempFileStream = new FileOutputStream(tempFile);
                org.apache.commons.io.IOUtils.copy(is, tempFileStream);
            } finally {
                IOUtils.closeStream(is);
                IOUtils.closeStream(tempFileStream);
            }

            FileSystem fs = HadoopUtil.getFileSystem("file:///" + tempFile.getAbsolutePath());
            SequenceFile.Reader reader = null;
            try {
                conf = HadoopUtil.getCurrentConfiguration();
                //noinspection deprecation
                reader = new SequenceFile.Reader(fs, new Path(tempFile.getAbsolutePath()), conf);
                LongWritable keyW = (LongWritable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
                BytesWritable valueW = (BytesWritable) ReflectionUtils.newInstance(reader.getValueClass(),
                        conf);

                while (reader.next(keyW, valueW)) {
                    if (keyW.get() == 0L) {
                        // sampling percentage;
                        averageSamplingPercentage += Bytes.toInt(valueW.getBytes());
                    } else if (keyW.get() > 0) {
                        HLLCounter hll = new HLLCounter(kylinConfig.getCubeStatsHLLPrecision());
                        ByteArray byteArray = new ByteArray(valueW.getBytes());
                        hll.readRegisters(byteArray.asBuffer());

                        if (cuboidHLLMap.get(keyW.get()) != null) {
                            cuboidHLLMap.get(keyW.get()).merge(hll);
                        } else {
                            cuboidHLLMap.put(keyW.get(), hll);
                        }
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
                throw e;
            } finally {
                IOUtils.closeStream(reader);
            }
        }

        averageSamplingPercentage = averageSamplingPercentage / mergingSegments.size();
        CubeStatsWriter.writeCuboidStatistics(conf, new Path(statOutputPath), cuboidHLLMap,
                averageSamplingPercentage);
        Path statisticsFilePath = new Path(statOutputPath,
                BatchConstants.CFG_STATISTICS_CUBOID_ESTIMATION_FILENAME);

        FileSystem fs = HadoopUtil.getFileSystem(statisticsFilePath, conf);
        FSDataInputStream fis = fs.open(statisticsFilePath);

        try {
            // put the statistics to metadata store
            String statisticsFileName = newSegment.getStatisticsResourcePath();
            rs.putResource(statisticsFileName, fis, System.currentTimeMillis());
        } finally {
            IOUtils.closeStream(fis);
        }

        context.write(new IntWritable(-1), new Text(""));
    }
}