Example usage for org.springframework.core.io FileSystemResource FileSystemResource

List of usage examples for org.springframework.core.io FileSystemResource FileSystemResource

Introduction

On this page you can find example usages of the org.springframework.core.io.FileSystemResource constructor FileSystemResource.

Prototype

public FileSystemResource(Path filePath) 

Source Link

Document

Create a new FileSystemResource from a Path handle, performing all file system interactions via NIO.2 instead of File.

Usage

From source file:annis.administration.AdministrationDao.java

/**
 * Reads tab seperated files from the filesystem, but it takes only files into
 * account with the {@link DefaultAdministrationDao#REL_ANNIS_FILE_SUFFIX}
 * suffix. Further it is straight forward except for the
 * {@link DefaultAdministrationDao#FILE_RESOLVER_VIS_MAP} and the
 * {@link DefaultAdministrationDao#EXAMPLE_QUERIES_TAB}. This is done by this
 * method automatically./*from   w w  w.  ja v  a  2 s .  c  om*/
 *
 * <ul>
 *
 * <li>{@link DefaultAdministrationDao#FILE_RESOLVER_VIS_MAP}: For backwards
 * compatibility, the columns must be counted, since there exists one
 * additional column for visibility behaviour of visualizers.</li>
 *
 * <li>{@link DefaultAdministrationDao#EXAMPLE_QUERIES_TAB}: Takes into
 * account the state of {@link #generateExampleQueries}.</li>
 *
 * </ul>
 *
 * @param path The path to the ANNIS files. The files have to have this suffix
 * @param version {@link DefaultAdministrationDao#REL_ANNIS_FILE_SUFFIX}
 */
void bulkImport(String path, ANNISFormatVersion version) {
    log.info("bulk-loading data");

    for (String table : importedTables) {
        if (table.equalsIgnoreCase(FILE_RESOLVER_VIS_MAP)) {
            importResolverVisMapTable(path, table);
        }
        // check if example query exists. If not copy it from the resource folder.
        else if (table.equalsIgnoreCase(EXAMPLE_QUERIES_TAB)) {
            File f = new File(path, table + annisFileSuffix);
            if (f.exists()) {
                log.info(table + annisFileSuffix + " file exists");
                bulkloadTableFromResource(tableInStagingArea(table), new FileSystemResource(f));

                if (generateExampleQueries == (EXAMPLE_QUERIES_CONFIG.IF_MISSING)) {
                    generateExampleQueries = EXAMPLE_QUERIES_CONFIG.FALSE;
                }
            } else {
                if (generateExampleQueries == EXAMPLE_QUERIES_CONFIG.IF_MISSING) {
                    generateExampleQueries = EXAMPLE_QUERIES_CONFIG.TRUE;
                }

                log.info(table + annisFileSuffix + " file not found");
            }
        } else if (table.equalsIgnoreCase("node")) {
            bulkImportNode(path, version);
        } else {
            bulkloadTableFromResource(tableInStagingArea(table),
                    new FileSystemResource(new File(path, table + annisFileSuffix)));
        }
    }
}

From source file:annis.administration.AdministrationDao.java

/**
 * Bulk-imports the node table, bridging the format difference between ANNIS
 * versions: 3.2/3.3 node files already contain the segmentation columns and
 * are loaded directly, while 3.1 files lack them and are loaded through a
 * temporary helper table whose rows are copied into the staging area with
 * NULL segmentation values.
 *
 * @param path directory containing the ANNIS import files
 * @param version the detected ANNIS format version of the corpus
 */
private void bulkImportNode(String path, ANNISFormatVersion version) {
    // check column number by reading first line (used for the error message only)
    File nodeTabFile = new File(path, "node" + annisFileSuffix);
    try (BufferedReader reader = new BufferedReader(
            new InputStreamReader(new FileInputStream(nodeTabFile), "UTF-8"))) {

        String firstLine = reader.readLine();

        // empty file: assume the full 13-column layout of the new format
        int columnNumber = firstLine == null ? 13 : StringUtils.splitPreserveAllTokens(firstLine, '\t').length;
        if (version == ANNISFormatVersion.V3_3 || version == ANNISFormatVersion.V3_2) {
            // new node table with segmentations: no special handling needed
            bulkloadTableFromResource(tableInStagingArea("node"), new FileSystemResource(nodeTabFile));
        } else if (version == ANNISFormatVersion.V3_1) {
            getJdbcTemplate().execute("DROP TABLE IF EXISTS _tmpnode;");
            // old node table without segmentations:
            // create a temporary table for the bulk import
            getJdbcTemplate().execute("CREATE TEMPORARY TABLE _tmpnode" + "\n(\n" + "id bigint,\n"
                    + "text_ref integer,\n" + "corpus_ref integer,\n" + "namespace varchar,\n"
                    + "name varchar,\n" + "\"left\" integer,\n" + "\"right\" integer,\n"
                    + "token_index integer,\n" + "continuous boolean,\n" + "span varchar\n" + ");");

            bulkloadTableFromResource("_tmpnode", new FileSystemResource(nodeTabFile));

            log.info("copying nodes from temporary helper table into staging area");
            // old format carries no segmentation data, so fill those columns
            // with NULL. (Fixed: the third alias was duplicated as "seg_left";
            // the INSERT is positional, so the result rows were unaffected,
            // but the alias should read seg_right.)
            getJdbcTemplate().execute("INSERT INTO " + tableInStagingArea("node") + "\n"
                    + "  SELECT id, text_ref, corpus_ref, namespace AS layer, name, \"left\", "
                    + "\"right\", token_index, "
                    + "NULL AS seg_name, NULL AS seg_left, NULL AS seg_right, continuous, " + "span\n"
                    + "FROM _tmpnode");
        } else {
            throw new RuntimeException("Illegal number of columns in node" + annisFileSuffix + ", "
                    + "should be 13 or 10 but was " + columnNumber);
        }
    } catch (IOException ex) {
        // include the file path instead of a null message so the log is actionable
        log.error("could not read " + nodeTabFile.getPath(), ex);
    }
}

From source file:gov.nih.nci.ncicb.tcga.dcc.QCLiveTestDataGenerator.java

/**
 * Main entry point for the application. Configures the Spring context and calls the {@link QCLiveTestDataGenerator}
 * bean to load and generate test data for a specific archive name.
 *
 * @param args - list of arguments to be passed to the {@link QCLiveTestDataGenerator} bean
 */
public static void main(final String[] args) {

    // Display help if no arguments are provided, otherwise parse the arguments
    if (args.length == 0) {
        displayHelp();
    } else {
        try {
            // Parse the command line arguments
            final CommandLine commandLine = new GnuParser().parse(CommandLineOptionType.getOptions(), args);

            // If the command line instance contains the -? (--help) option display help, otherwise call the
            // QCLiveTestDataGenerator to process the command line arguments
            if (commandLine.hasOption(CommandLineOptionType.HELP.name().toLowerCase())) {
                displayHelp();
            } else {
                final String archiveNameOption = CommandLineOptionType.ARCHIVE_NAME.getOptionValue().getOpt();
                final String sqlScriptFileOption = CommandLineOptionType.SQL_SCRIPT_FILE.getOptionValue()
                        .getOpt();
                final String schemaOption = CommandLineOptionType.SCHEMA.getOptionValue().getOpt();

                // Initialize the Spring context and retrieve the QCLiveTestDataGenerator bean
                final ApplicationContext appCtx = new ClassPathXmlApplicationContext(APP_CONTEXT_FILE_NAME);
                final QCLiveTestDataGenerator qcLiveTestDataGenerator = (QCLiveTestDataGenerator) appCtx
                        .getBean("qcLiveTestDataGenerator");

                // Get the archive name from the command line argument(s) (if provided) and generate the test data
                if (commandLine.hasOption(archiveNameOption)) {
                    qcLiveTestDataGenerator.generateTestData(commandLine.getOptionValue(archiveNameOption));
                }

                // If the SQL script file option is provided, the schema option is required as well
                if (commandLine.hasOption(sqlScriptFileOption)) {
                    if (!commandLine.hasOption(schemaOption)) {
                        throw new ParseException(
                                "Setting the -f (or -sql_script_file) option also requires the -s (or -schema) to be set.");
                    }

                    // Try to resolve the schema type from the provided schema name. If it cannot be resolved,
                    // throw an exception that indicates the supported schema types
                    final String schemaOptionValue = commandLine.getOptionValue(schemaOption);
                    SchemaType schemaType = null; // renamed from misspelled "schemaTpye"
                    try {
                        schemaType = SchemaType.valueOf(schemaOptionValue.toUpperCase());
                    } catch (IllegalArgumentException iae) {
                        throw new ParseException("Could not resolve schema name '" + schemaOptionValue
                                + "' to a supported schema type "
                                + "when attempting to execute SQL script file '"
                                + commandLine.getOptionValue(sqlScriptFileOption) + "'. "
                                + "Supported types are '" + SchemaType.getSupportedSchemaTypes() + "'");
                    }

                    qcLiveTestDataGenerator.executeSQLScriptFile(schemaType,
                            new FileSystemResource(commandLine.getOptionValue(sqlScriptFileOption)));
                }
            }
        } catch (ParseException pe) {
            System.err.println("\nParsing failed. Reason: " + pe.getMessage());
            displayHelp();
        } catch (IOException ioe) {
            logger.error(ioe.getMessage());
        } catch (SQLException sqle) {
            logger.error(sqle.getMessage());
        }
    }
}

From source file:de.ingrid.admin.Config.java

/**
 * Try to get the override configuration first from the classpath and
 * otherwise expect it inside the conf directory. The first option is mainly
 * for development, but should also apply for production since the
 * conf-directory also is in the Classpath. With this function the
 * development environment does not need any manual setup anymore, as long
 * as the test-resources is in the classpath.
 * /*from   w w w .j  a  v a2  s. c o m*/
 * @return the resource to the override configuration
 */
private Resource getOverrideConfigResource() {
    ClassPathResource override = new ClassPathResource("config.override.properties");
    try {
        override.getFile();
        return override;
    } catch (FileNotFoundException e) {
        // do nothing here! get file from conf directory (see return value)
    } catch (IOException e) {
        log.error("Error when getting config.override.properties", e);
    }
    return new FileSystemResource("conf/config.override.properties");
}

From source file:annis.administration.DefaultAdministrationDao.java

/**
 * Executes the SQL contained in the given script file, resolved relative
 * to the configured script path.
 *
 * @param script script file name, resolved against {@code scriptPath}
 * @param args parameters substituted into the SQL before execution
 * @return {@code true} if the script was found and executed, {@code false}
 *         when the file is missing or unreadable
 */
@Override
public boolean executeSqlFromScript(String script, MapSqlParameterSource args) {
    final File scriptFile = new File(scriptPath, script);
    // bail out early when there is nothing readable to execute
    if (!(scriptFile.canRead() && scriptFile.isFile())) {
        log.debug("SQL script " + scriptFile.getName() + " does not exist");
        return false;
    }
    final Resource resource = new FileSystemResource(scriptFile);
    log.debug("executing SQL script: " + resource.getFilename());
    jdbcTemplate.execute(readSqlFromResource(resource, args));
    return true;
}

From source file:annis.administration.DefaultAdministrationDao.java

/**
 * Runs the SQL from the given script file (resolved relative to the
 * configured script path) as a query and extracts a result from the rows.
 *
 * @param script script file name, resolved against {@code scriptPath}
 * @param resultSetExtractor extractor applied to the query result
 * @return the extracted value, or {@code null} when the script file is
 *         missing or unreadable
 */
private <T> T querySqlFromScript(String script, ResultSetExtractor<T> resultSetExtractor) {
    final File scriptFile = new File(scriptPath, script);
    // bail out early when there is nothing readable to query
    if (!(scriptFile.canRead() && scriptFile.isFile())) {
        log.debug("SQL script " + scriptFile.getName() + " does not exist");
        return null;
    }
    final Resource resource = new FileSystemResource(scriptFile);
    log.debug("executing SQL script: " + resource.getFilename());
    return jdbcTemplate.query(readSqlFromResource(resource, null), resultSetExtractor);
}

From source file:architecture.ee.web.spring.controller.SecureWebMgmtDataController.java

/**
 * Wraps the given file handle in a Spring {@link FileSystemResource}.
 *
 * @param file the file to expose as a resource
 * @return a resource backed by the given file
 */
protected Resource fileToResource(File file) {
    final Resource resource = new FileSystemResource(file);
    return resource;
}

From source file:org.alfresco.integrations.google.docs.service.GoogleDocsServiceImpl.java

/**
 * Uploads the content of the given node to Google Drive as a hidden file,
 * parented under a freshly created working directory and converted on
 * upload.
 *
 * @param nodeRef the Alfresco node whose content is uploaded
 * @return the Drive file created by the upload
 * @throws IOException if the content cannot be spooled to a temp file
 * @throws GoogleDocsServiceException if Drive responds with an HTTP error status
 */
public DriveFile uploadFile(NodeRef nodeRef) throws GoogleDocsAuthenticationException,
        GoogleDocsServiceException, GoogleDocsRefreshTokenException, IOException {
    log.debug("Upload " + nodeRef + " to Google");
    DriveOperations driveOperations = getDriveOperations(getConnection());

    DriveFile driveFile = null;

    // The Drive upload API does not accept InputStreams, so the content is
    // spooled to a temporary file that is always cleaned up afterwards.
    File file = null;

    try {
        // Copy the node content into a temp file
        ContentReader reader = fileFolderService.getReader(nodeRef);

        file = File.createTempFile(nodeRef.getId(), ".tmp", TempFileProvider.getTempDir());
        reader.getContent(file);

        // Determine the mimetype of the content
        FileInfo fileInfo = fileFolderService.getFileInfo(nodeRef);
        String mimetype = fileInfo.getContentData().getMimetype();

        // Create the working directory on Drive
        DriveFile workingDir = createWorkingDirectory(nodeRef);

        driveFile = new DriveFile.Builder().setParents(workingDir.getId()).setTitle(fileInfo.getName())
                .setHidden(true).setMimeType(mimetype).build();

        UploadParameters uploadParameters = new UploadParameters().setConvert(true);

        driveFile = driveOperations.upload(new FileSystemResource(file), driveFile, uploadParameters);

    } catch (HttpStatusCodeException hsce) {
        // translate the HTTP failure into the service-level exception callers expect
        // (IOException no longer caught only to be rethrown — it propagates as before)
        throw new GoogleDocsServiceException(hsce.getMessage(), hsce.getStatusCode().value());
    } finally {
        // always remove the temp file, even on failure
        if (file != null) {
            file.delete();
        }
    }

    return driveFile;
}

From source file:annis.administration.DefaultAdministrationDao.java

/**
 * Imports a resolver_vis_map file in the old column layout (without the
 * visibility column): the rows are bulk-loaded into a temporary table and
 * then copied column by column into the staging-area resolver_vis_map
 * table, after which the helper table is dropped.
 *
 * @param resolver_vis_tab the resolver_vis_map file in the old format
 */
private void readOldResolverVisMapFormat(File resolver_vis_tab) {
    // helper table matching the old column layout
    jdbcTemplate.execute("CREATE TABLE tmp_resolver_vis_map " + "( "
            + "\"corpus\" varchar, " + "\"version\" varchar, " + "\"namespace\" varchar, "
            + "\"element\" varchar, " + "\"vis_type\" varchar NOT NULL, "
            + "\"display_name\" varchar NOT NULL, " + "\"order\" integer default '0', "
            + "\"mappings\" varchar" + ");");

    bulkloadTableFromResource("tmp_resolver_vis_map", new FileSystemResource(resolver_vis_tab));

    // copy the imported rows into the staging area
    jdbcTemplate.execute("INSERT INTO " + tableInStagingArea(FILE_RESOLVER_VIS_MAP) + "\n\t"
            + " (corpus, version, namespace, element, vis_type, display_name, \"order\", mappings)" + "\n"
            + "SELECT tmp.corpus, tmp.version, tmp.namespace, tmp.element, tmp.vis_type, "
            + "tmp.display_name, tmp.\"order\", tmp.mappings" + "\n\t"
            + "FROM tmp_resolver_vis_map AS tmp; ");

    jdbcTemplate.execute("DROP TABLE tmp_resolver_vis_map;");
}