Example usage for com.google.common.io Files toString

Introduction

This page collects example usages of com.google.common.io.Files.toString(File, Charset) from open-source projects.

Prototype

public static String toString(File file, Charset charset) throws IOException 
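
A minimal, self-contained sketch of the call; the file name is illustrative. Note that recent Guava releases deprecate Files.toString in favor of Files.asCharSource(file, charset).read().

import com.google.common.io.Files;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class FilesToStringExample {
    public static void main(String[] args) throws IOException {
        File file = new File("example.txt"); // hypothetical path
        // Reads the entire file into one String, decoding with the given charset.
        String content = Files.toString(file, StandardCharsets.UTF_8);
        System.out.println(content);
    }
}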

Usage

From source file:hu.skawa.migrator_maven_plugin.DependencyExport.java

public void execute() throws MojoExecutionException {
    Set<Artifact> artifacts = project.getArtifacts();
    for (Artifact arti : artifacts) {
        File file = arti.getFile();
        String hash = "";
        try {
            byte[] contents = Files.toByteArray(file);
            hash = Hashing.sha1().hashBytes(contents).toString();
        } catch (IOException e) {
            throw new MojoExecutionException("Dependency could not be hashed!", e);
        }
        InternalDependency id = new InternalDependency(arti.getGroupId(), arti.getArtifactId(),
                arti.getVersion(), hash);
        File remotes = new File(file.getParent() + File.separator + "_remote.repositories");
        try {
            String remoteDescriptorContent = Files.toString(remotes, StandardCharsets.UTF_8);
            getLog().debug(remoteDescriptorContent);
            Matcher jarServerMatcher = jarPattern.matcher(remoteDescriptorContent);
            while (jarServerMatcher.find()) {
                String server = jarServerMatcher.group(1);
                if (server != null) {
                    id.setJarServer(server);
                } else {
                    id.setJarServer("");
                }
            }
        } catch (IOException e) {
            getLog().warn(
                    "Could not locate repository file for " + arti.getArtifactId() + ", setting to empty!");
            id.setJarServer("");
        }
        allDependencies.add(id);
    }

    if (outputFilePrefix != null) {
        File directives = new File(outputFilePrefix + "-" + project.getName() + "-directives");
        File references = new File(outputFilePrefix + "-" + project.getName() + "-references");

        try (FileWriter directiveWriter = new FileWriter(directives);
                FileWriter referenceWriter = new FileWriter(references)) {
            for (InternalDependency dep : allDependencies) {
                if (outputDirectives) {
                    directiveWriter.append(dep.toBazelDirective(addHashes, addServers));
                    directiveWriter.append("\n");
                }
                if (outputReferences) {
                    referenceWriter.append(dep.getArtifactId() + ": @" + dep.getBazelName() + "//jar");
                    referenceWriter.append("\n");
                }
            }
        } catch (IOException e) {
            getLog().error(e);
        }
    } else {
        for (InternalDependency dep : allDependencies) {
            getLog().info(dep.toBazelDirective(addHashes, addServers));
        }
    }
}
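
The jarPattern field is not shown in this excerpt. _remote.repositories is a small properties-style marker file that Maven's resolver writes next to each downloaded artifact, with entries such as mylib-1.0.jar>central=. A regex along the following lines would capture the repository id; the exact pattern is an assumption, not the plugin's own:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Hypothetical reconstruction of jarPattern: capture the repository id
// that follows "<artifact>.jar>" in _remote.repositories.
Pattern jarPattern = Pattern.compile("\\.jar>([^=]*)=");

String remoteDescriptorContent = "#NOTE: This is an internal implementation file.\n" + "mylib-1.0.jar>central=\n";
Matcher jarServerMatcher = jarPattern.matcher(remoteDescriptorContent);
while (jarServerMatcher.find()) {
    System.out.println(jarServerMatcher.group(1)); // prints "central"
}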

From source file:org.eclipse.xtext.xbase.lib.ArithmeticExtensionGenerator.java

public void generate() {
    try {
        final String path = "../org.eclipse.xtext.xbase.lib/src/org/eclipse/xtext/xbase/lib/";
        File _file = new File(path);
        _file.mkdirs();
        for (final String type : this.types) {
            {
                String _className = this.className(type);
                String _plus = (path + _className);
                String _plus_1 = (_plus + ".java");
                final File file = new File(_plus_1);
                CharSequence _xifexpression = null;
                boolean _exists = file.exists();
                if (_exists) {
                    String _xblockexpression = null;
                    {
                        final String content = Files.toString(file, Charsets.ISO_8859_1);
                        StringConcatenation _builder = new StringConcatenation();
                        String _startMarker = this.startMarker();
                        int _indexOf = content.indexOf(_startMarker);
                        String _substring = content.substring(0, _indexOf);
                        _builder.append(_substring, "");
                        _builder.newLineIfNotEmpty();
                        _builder.append("\t");
                        CharSequence _generateAllOperations = this.generateAllOperations(type);
                        _builder.append(_generateAllOperations, "\t");
                        _builder.newLineIfNotEmpty();
                        String _endMarker = this.endMarker();
                        int _indexOf_1 = content.indexOf(_endMarker);
                        String _endMarker_1 = this.endMarker();
                        int _length = _endMarker_1.length();
                        int _plus_2 = (_indexOf_1 + _length);
                        String _substring_1 = content.substring(_plus_2);
                        _builder.append(_substring_1, "");
                        _xblockexpression = _builder.toString();
                    }
                    _xifexpression = _xblockexpression;
                } else {
                    _xifexpression = this.generate(type);
                }
                final CharSequence newContent = _xifexpression;
                final StringConcatenation result = new StringConcatenation("\n");
                result.append(newContent);
                Files.write(result, file, Charsets.ISO_8859_1);
            }
        }
    } catch (Throwable _e) {
        throw Exceptions.sneakyThrow(_e);
    }
}
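
Stripped of the Xtend-generated locals, the regeneration step above is a marker splice: keep everything before the start marker and after the end marker, and insert the freshly generated operations in between. A simplified sketch; the method name is illustrative:

// Keep the text before startMarker and after endMarker, replacing the
// region in between (markers included) with newBody.
static String splice(String content, String startMarker, String endMarker, CharSequence newBody) {
    int start = content.indexOf(startMarker);
    int end = content.indexOf(endMarker) + endMarker.length();
    return content.substring(0, start) + newBody + content.substring(end);
}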

From source file:org.jclouds.snia.cdmi.v1.options.CreateDataObjectOptions.java

/**
 * Create a CDMI data object with a file value.
 *
 * @param value
 *           file whose contents are read into a String using the UTF_8 charset
 * @return CreateDataObjectOptions
 */
public CreateDataObjectOptions value(File value) throws IOException {
    jsonObjectBody.addProperty("value", (value == null) ? "" : Files.toString(value, Charsets.UTF_8));
    this.payload = jsonObjectBody.toString();
    return this;
}
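
As the null guard implies, passing null stores an empty string under "value" instead of throwing. A call-site sketch; the construction of the options object is hypothetical, since only the value(File) method is shown here:

// Hypothetical usage: a null file yields {"value": ""}, a real file
// yields its UTF-8 decoded contents.
CreateDataObjectOptions options = new CreateDataObjectOptions().value(new File("payload.txt"));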

From source file:se.kth.karamel.backend.ClusterDefinitionService.java

public static String loadYaml(String clusterName) throws KaramelException {
    try {
        String name = clusterName.toLowerCase();
        File folder = new File(Settings.CLUSTER_ROOT_PATH(name));
        if (!folder.exists()) {
            throw new KaramelException(String.format("cluster '%s' is not available", name));
        }
        String yamlPath = Settings.CLUSTER_YAML_PATH(name);
        File file = new File(yamlPath);
        if (!file.exists()) {
            throw new KaramelException(String.format("yaml '%s' is not available", yamlPath));
        }
        String yaml = Files.toString(file, Charsets.UTF_8);
        return yaml;
    } catch (IOException ex) {
        throw new KaramelException("Could not load the yaml", ex);
    }
}

From source file:com.shmsoft.dmass.main.MRFreeEedProcess.java

@Override
public int run(String[] args) throws Exception {
    // inventory dir holds all package (zip) files resulting from stage
    String projectFileName = args[0];
    String outputPath = args[1];
    logger.info("Running Hadoop job");
    logger.info("Input project file = " + projectFileName);
    logger.info("Output path = " + outputPath);

    // Hadoop configuration class
    Configuration configuration = getConf();
    // No speculative execution! Do not process the same file twice
    configuration.set("mapred.reduce.tasks.speculative.execution", "false");
    // TODO even in local mode, the first argument should not be the inventory
    // but write a complete project file instead
    Project project = Project.getProject();
    if (project == null || project.isEmpty()) {
        // configure Hadoop input files
        System.out.println("Reading project file " + projectFileName);
        project = new Project().loadFromFile(new File(projectFileName));
        Project.setProject(project);
    }
    project.setProperty(ParameterProcessing.OUTPUT_DIR_HADOOP, outputPath);
    // send complete project information to all mappers and reducers
    configuration.set(ParameterProcessing.PROJECT, project.toString());

    Settings.load();
    configuration.set(ParameterProcessing.SETTINGS_STR, Settings.getSettings().toString());
    configuration.set(ParameterProcessing.METADATA_FILE,
            Files.toString(new File(ColumnMetadata.metadataNamesFile), Charset.defaultCharset()));
    Job job = new Job(configuration);
    job.setJarByClass(MRFreeEedProcess.class);
    job.setJobName("MRFreeEedProcess");

    // Hadoop processes key-value pairs
    job.setOutputKeyClass(MD5Hash.class);
    job.setOutputValueClass(MapWritable.class);

    // set map and reduce classes
    job.setMapperClass(Map.class);
    job.setReducerClass(Reduce.class);

    // Hadoop TextInputFormat class
    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);

    //        String delim = "\u0001";
    //        configuration.set("mapred.textoutputformat.separator", delim);
    //        configuration.set("mapreduce.output.textoutputformat.separator", delim);

    logger.debug("project.isEnvHadoop() = {} ", project.isEnvHadoop());
    String inputPath = projectFileName;
    if (project.isEnvHadoop() || Settings.getSettings().isHadoopDebug()) {
        inputPath = formInputPath(project);
    }

    logger.debug("Ready to run, inputPath = {}, outputPath = {}", inputPath, outputPath);
    FileInputFormat.setInputPaths(job, inputPath);
    FileOutputFormat.setOutputPath(job, new Path(outputPath));

    SHMcloudLogging.init(false);

    if (Settings.getSettings().isHadoopDebug()) {
        if (new File(outputPath).exists()) {
            Util.deleteDirectory(new File(outputPath));
        }
    }

    SolrIndex.getInstance().init();

    boolean success = job.waitForCompletion(true);
    if (project.isEnvHadoop() && project.isFsS3()) {
        transferResultsToS3(outputPath);
    }

    SolrIndex.getInstance().destroy();

    return success ? 0 : 1;
}
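
The metadata file's contents travel to the tasks inside the job Configuration rather than as a file on the cluster; on the task side they can be recovered with a plain get under the same key. A sketch, assuming the ParameterProcessing.METADATA_FILE key shown above:

// Inside the Mapper: read back the metadata content shipped via the Configuration.
@Override
protected void setup(Context context) {
    String metadataContent = context.getConfiguration().get(ParameterProcessing.METADATA_FILE);
    // ... parse metadataContent as needed
}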

From source file:org.graylog2.rules.DroolsEngine.java

@Override
public synchronized boolean addRulesFromFile(String rulesFile) {
    LOG.debug("Adding drools rules from file {}", rulesFile);
    try {
        final String rulesSource = Files.toString(new File(rulesFile), StandardCharsets.UTF_8);
        return addRule(rulesSource);
    } catch (IOException e) {
        LOG.warn("Could not read drools source file. Not loading rules: {}", e.getMessage());
    }
    return false;
}
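
For context, the source read here is plain DRL text. An illustrative rule of the kind this method would load; the rule body is an invented example, not taken from Graylog:

// Illustrative DRL source that addRulesFromFile() reads from disk.
String rulesSource = "rule \"example rule\"\n"
        + "when\n"
        + "    m : Message()\n"
        + "then\n"
        + "    m.setFilterOut(true);\n"
        + "end\n";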

From source file:org.freeeed.mr.FreeEedMR.java

@Override
public int run(String[] args) throws Exception {
    // inventory dir holds all package (zip) files resulting from stage
    String projectFileName = args[0];
    String outputPath = args[1];
    LOGGER.info("Running Hadoop job");
    LOGGER.info("Input project file = " + projectFileName);
    LOGGER.info("Output path = " + outputPath);
    Stats.getInstance().setNumberMappers(projectFileName);
    ESIndex.getInstance().init();

    // Hadoop configuration class
    Configuration configuration = getConf();
    // No speculative execution! Do not process the same file twice
    configuration.set("mapred.reduce.tasks.speculative.execution", "false");
    // TODO even in local mode, the first argument should not be the inventory
    // but write a complete project file instead
    Project project = Project.getCurrentProject();
    if (project == null || project.isEmpty()) {
        // configure Hadoop input files
        System.out.println("Reading project file " + projectFileName);
        project = Project.loadFromFile(new File(projectFileName));
    }
    project.setProperty(ParameterProcessing.OUTPUT_DIR_HADOOP, outputPath);
    // send complete project information to all mappers and reducers
    configuration.set(ParameterProcessing.PROJECT, project.toString());

    Settings.load();
    configuration.set(ParameterProcessing.SETTINGS_STR, Settings.getSettings().toString());
    configuration.set(EmailProperties.PROPERTIES_FILE,
            Files.toString(new File(EmailProperties.PROPERTIES_FILE), Charset.defaultCharset()));
    Job job = new Job(configuration);
    job.setJarByClass(FreeEedMR.class);
    job.setJobName("FreeEedMR");

    // Hadoop processes key-value pairs
    //        job.setOutputKeyClass(Text.class);
    //        job.setOutputValueClass(MapWritable.class);

    // set map and reduce classes
    job.setMapperClass(FreeEedMapper.class);
    job.setInputFormatClass(NLineInputFormat.class);
    job.setNumReduceTasks(0);
    // secondary sort for compound keys - this sorts the attachments
    job.setSortComparatorClass(KeyComparator.class);
    job.setGroupingComparatorClass(GroupComparator.class);

    // Hadoop TextInputFormat class
    //        job.setInputFormatClass(TextInputFormat.class);
    //        job.setOutputFormatClass(TextOutputFormat.class);

    LOGGER.debug("project.isEnvHadoop() = {} ", project.isEnvHadoop());
    String inputPath = projectFileName;
    if (project.isEnvHadoop() || Settings.getSettings().isHadoopDebug()) {
        inputPath = formInputPath(project);
    }

    LOGGER.debug("Ready to run, inputPath = {}, outputPath = {}", inputPath, outputPath);
    FileInputFormat.setInputPaths(job, inputPath);
    FileOutputFormat.setOutputPath(job, new Path(outputPath));

    if (Settings.getSettings().isHadoopDebug()) {
        if (new File(outputPath).exists()) {
            Util.deleteDirectory(new File(outputPath));
        }
    }

    LOGGER.trace("Project");
    LOGGER.trace(project.toString());

    boolean success = job.waitForCompletion(true);

    ESIndex.getInstance().destroy();

    if (project.isEnvHadoop() && project.isFsS3()) {
        transferResultsToS3(outputPath);
    }

    return success ? 0 : 1;
}

From source file:org.sonar.css.ast.visitors.SyntaxHighlighterVisitor.java

@Override
public void visitFile(AstNode astNode) {
    if (astNode == null) {
        // parse error
        return;
    }

    Resource<?> sonarFile = File.fromIOFile(new java.io.File(peekSourceFile().getKey()), sourceDirs);
    highlighting = perspectives.as(Highlightable.class, sonarFile).newHighlighting();

    lineStart = Lists.newArrayList();
    final String content;
    try {
        content = Files.toString(getContext().getFile(), charset);
    } catch (IOException e) {
        throw Throwables.propagate(e);
    }
    lineStart.add(0);
    for (int i = 0; i < content.length(); i++) {
        if (content.charAt(i) == '\n'
                || (content.charAt(i) == '\r' && i + 1 < content.length() && content.charAt(i + 1) != '\n')) {
            lineStart.add(i + 1);
        }
    }
}
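
The loop records the offset at which each line begins, treating \n, \r\n, and a lone \r as terminators (a \r\n pair contributes a single entry, at the \n). A small worked example of the offsets it produces:

// For content = "a\nb\r\nc" the loop yields lineStart = [0, 2, 5]:
// line "a" starts at offset 0, "b" at 2, "c" at 5.
String content = "a\nb\r\nc";
List<Integer> lineStart = Lists.newArrayList();
lineStart.add(0);
for (int i = 0; i < content.length(); i++) {
    if (content.charAt(i) == '\n'
            || (content.charAt(i) == '\r' && i + 1 < content.length() && content.charAt(i + 1) != '\n')) {
        lineStart.add(i + 1);
    }
}
System.out.println(lineStart); // prints [0, 2, 5]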

From source file:com.sonar.sslr.api.typed.ActionParser.java

public N parse(File file) {
    try {
        return parse(new Input(Files.toString(file, charset).toCharArray(), file.toURI()));
    } catch (IOException e) {
        throw Throwables.propagate(e);
    }
}

From source file:com.netflix.iep.config.ConfigFile.java

public static String toPropertiesString(Map<String, String> vars, File file) {
    try {
        return toPropertiesString(vars, Files.toString(file, Charsets.UTF_8));
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}