Example usage for java.nio.file Path resolve

Introduction

On this page you can find usage examples for java.nio.file Path resolve.

Prototype

default Path resolve(String other) 

Document

Converts a given path string to a Path and resolves it against this Path in exactly the manner specified by the resolve(Path) method.
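
The following minimal sketch illustrates these semantics. The base path and arguments are hypothetical, and the outputs shown in the comments assume a POSIX-style default file system.

import java.nio.file.Path;
import java.nio.file.Paths;

public class ResolveDemo {
    public static void main(String[] args) {
        Path base = Paths.get("/home/user");

        // A relative path string is appended to this path.
        System.out.println(base.resolve("docs/notes.txt")); // /home/user/docs/notes.txt

        // An absolute path string is returned as-is; the receiver is ignored.
        System.out.println(base.resolve("/etc/hosts")); // /etc/hosts

        // An empty string trivially returns this path.
        System.out.println(base.resolve("")); // /home/user
    }
}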

Usage

From source file:de.topobyte.osm4j.extra.extracts.query.Query.java

public void execute() throws IOException {
    createTemporaryDirectory();

    // Query setup

    openTree();

    Geometry box = factory.toGeometry(queryEnvelope);
    List<Node> leafs = tree.query(box);

    // Query data tree

    for (Node leaf : leafs) {
        String leafName = Long.toHexString(leaf.getPath());

        if (test.contains(leaf.getEnvelope())) {
            System.out.println("Leaf is completely contained: " + leafName);
            addCompletelyContainedLeaf(leaf);
            continue;
        }

        System.out.println("Loading data from leaf: " + leafName);
        addIntersectingLeaf(leaf);
    }

    System.out.println(String.format("Total number of nodes: %d", nNodes));
    System.out.println(String.format("Total number of ways: %d", nWays));
    System.out.println(String.format("Total number of simple relations: %d", nSimpleRelations));
    System.out.println(String.format("Total number of complex relations: %d", nComplexRelations));

    // Query relations

    List<IdBboxEntry> entriesSimple = IdBboxUtil.read(paths.getSimpleRelationsBboxes());
    List<IdBboxEntry> entriesComplex = IdBboxUtil.read(paths.getComplexRelationsBboxes());

    for (IdBboxEntry entry : entriesSimple) {
        long id = entry.getId();
        if (test.contains(entry.getEnvelope())) {
            System.out.println("Simple batch completely contained: " + id);
            addCompletelyContainedBatch(paths.getSimpleRelations(), id, filesSimpleRelations);
        } else if (test.intersects(entry.getEnvelope())) {
            System.out.println("Loading data from simple batch: " + id);
            tmpIndexSimple++;
            String tmpFilenames = filename(tmpIndexSimple);
            System.out.println("Writing to files: " + tmpFilenames);

            Path pathDir = paths.getSimpleRelations().resolve(Long.toString(entry.getId()));
            Path pathNodes = pathDir.resolve(relationNames.getNodes());
            Path pathWays = pathDir.resolve(relationNames.getWays());
            Path pathRelations = pathDir.resolve(relationNames.getRelations());

            Path pathOutNodes = pathTmpSimpleNodes.resolve(tmpFilenames);
            Path pathOutWays = pathTmpSimpleWays.resolve(tmpFilenames);
            Path pathOutRelations = pathTmpSimpleRelations.resolve(tmpFilenames);

            runRelationsQuery(true, tmpFilenames, pathNodes, pathWays, pathRelations, pathOutNodes, pathOutWays,
                    pathOutRelations);
        }
    }

    for (IdBboxEntry entry : entriesComplex) {
        long id = entry.getId();
        if (test.contains(entry.getEnvelope())) {
            System.out.println("Complex batch completely contained: " + id);
            addCompletelyContainedBatch(paths.getComplexRelations(), id, filesComplexRelations);
        } else if (test.intersects(entry.getEnvelope())) {
            System.out.println("Loading data from complex batch: " + id);
            tmpIndexComplex++;
            String tmpFilenames = filename(tmpIndexComplex);
            System.out.println("Writing to files: " + tmpFilenames);

            Path pathDir = paths.getComplexRelations().resolve(Long.toString(entry.getId()));
            Path pathNodes = pathDir.resolve(relationNames.getNodes());
            Path pathWays = pathDir.resolve(relationNames.getWays());
            Path pathRelations = pathDir.resolve(relationNames.getRelations());

            Path pathOutNodes = pathTmpComplexNodes.resolve(tmpFilenames);
            Path pathOutWays = pathTmpComplexWays.resolve(tmpFilenames);
            Path pathOutRelations = pathTmpComplexRelations.resolve(tmpFilenames);

            runRelationsQuery(false, tmpFilenames, pathNodes, pathWays, pathRelations, pathOutNodes,
                    pathOutWays, pathOutRelations);
        }
    }

    // Merge intermediate files

    OsmStreamOutput output = createFinalOutput(pathOutput);

    List<OsmFileInput> mergeFiles = new ArrayList<>();

    mergeFiles.addAll(filesNodes);
    mergeFiles.addAll(filesWays);
    mergeFiles.addAll(filesSimpleRelations);
    mergeFiles.addAll(filesComplexRelations);

    System.out.println(String.format("Merging %d files", mergeFiles.size()));

    List<OsmIteratorInput> mergeIteratorInputs = new ArrayList<>();
    List<OsmIterator> mergeIterators = new ArrayList<>();
    for (OsmFileInput input : mergeFiles) {
        OsmIteratorInput iteratorInput = input.createIterator(true, outputConfig.isWriteMetadata());
        mergeIteratorInputs.add(iteratorInput);
        mergeIterators.add(iteratorInput.getIterator());
    }

    SortedMerge merge = new SortedMerge(output.getOsmOutput(), mergeIterators);
    merge.run();

    for (OsmIteratorInput input : mergeIteratorInputs) {
        input.close();
    }

    output.close();

    // Delete intermediate files

    if (!keepTmp) {
        FileUtils.deleteDirectory(pathTmp.toFile());
    }
}
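
This example derives per-batch input and output locations by chaining resolve calls. A minimal sketch of that pattern; the directory and file names here are hypothetical stand-ins for the values returned by paths and relationNames above.

import java.nio.file.Path;
import java.nio.file.Paths;

public class ChainedResolveDemo {
    public static void main(String[] args) {
        // Each resolve() step descends one level, mirroring how the
        // query derives pathDir, pathNodes, pathWays and pathRelations.
        Path batchRoot = Paths.get("simple-relations"); // hypothetical root
        Path pathDir = batchRoot.resolve(Long.toString(42L));
        Path pathNodes = pathDir.resolve("nodes.dat");  // hypothetical file name

        System.out.println(pathNodes); // simple-relations/42/nodes.dat
    }
}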

From source file:de.ks.flatadocdb.session.Session.java

protected void persist(Object entity, EntityDescriptor entityDescriptor, Path folder, String fileName) {
    Serializable naturalId = entityDescriptor.getNaturalId(entity);

    Path complete = folder.resolve(fileName);

    String id = idGenerator.getSha1Hash(repository.getPath(), complete);

    Object found = findById(id);
    if ((found != null || entity2Entry.containsKey(entity)) && !dirtyChecker.getDeletions().contains(entity)) {
        log.warn("Trying to persist entity {} [{}] twice", entity, complete);
        return;
    }

    entityDescriptor.writeId(entity, id);

    SessionEntry sessionEntry = new SessionEntry(entity, id, 0,
            naturalId == null ? null : new NaturalId(entityDescriptor.getEntityClass(), naturalId), complete,
            entityDescriptor);
    addToSession(sessionEntry);

    dirtyChecker.trackPersist(sessionEntry);

    EntityInsertion singleEntityInsertion = new EntityInsertion(repository, sessionEntry);
    actions.add(singleEntityInsertion);

    persistRelations(entityDescriptor.getNormalRelations(), entity, sessionEntry);
    persistRelations(entityDescriptor.getChildRelations(), entity, sessionEntry);
}

From source file:im.bci.gamesitekit.GameSiteKitMain.java

private void buildHtml(Locale locale)
        throws SAXException, IOException, TemplateException, ParserConfigurationException {
    Path localeOutputDir = outputDir.resolve(locale.getLanguage());
    Files.createDirectories(localeOutputDir);
    HashMap<String, Object> model = new HashMap<>();
    model.put("screenshots", createScreenshotsMV(localeOutputDir));
    model.put("lastUpdate", new Date());
    freemakerConfiguration.setLocale(locale);
    freemakerConfiguration.addAutoImport("manifest", "manifest.ftl");
    for (String file : Arrays.asList("index", "support")) {
        try (BufferedWriter w = Files.newBufferedWriter(localeOutputDir.resolve(file + ".html"),
                Charset.forName("UTF-8"))) {
            freemakerConfiguration.getTemplate(file + ".ftl").process(model, w);
        }
    }
}
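
Here resolve builds per-locale output files that are then opened with Files.newBufferedWriter. A minimal sketch of the same pattern, substituting StandardCharsets.UTF_8 for the equivalent Charset.forName("UTF-8"); the directory and file names are hypothetical.

import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;

public class LocaleOutputDemo {
    public static void main(String[] args) throws IOException {
        Path outputDir = Files.createTempDirectory("site");

        // Resolve a per-locale sub-directory, then the page inside it.
        Path localeDir = Files.createDirectories(outputDir.resolve("en"));
        Path page = localeDir.resolve("index" + ".html");

        try (BufferedWriter w = Files.newBufferedWriter(page, StandardCharsets.UTF_8)) {
            w.write("<!-- rendered page would go here -->");
        }
        System.out.println("Wrote " + page);
    }
}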

From source file:ca.polymtl.dorsal.libdelorean.statedump.Statedump.java

/**
 * Save this statedump at the given location.
 *
 * @param parentPath
 *            The location where to save the statedump file, usually in or
 *            close to its corresponding trace. It will be put under a Trace
 *            Compass-specific sub-directory.
 * @param ssid
 *            The state system ID of the state system we are saving. This
 *            will be used for restoration.
 * @throws IOException
 *             If there are problems creating or writing to the target
 *             directory
 */
public void dumpState(Path parentPath, String ssid) throws IOException {
    /* Create directory if it does not exist */
    Path sdPath = parentPath.resolve(STATEDUMP_DIRECTORY);
    if (!Files.exists(sdPath)) {
        Files.createDirectory(sdPath);
    }

    /* Create state dump file */
    String fileName = ssid + FILE_SUFFIX;
    Path filePath = sdPath.resolve(fileName);
    if (Files.exists(filePath)) {
        Files.delete(filePath);
    }
    Files.createFile(filePath);

    JSONObject root = new JSONObject();

    try (Writer bw = Files.newBufferedWriter(filePath, Charsets.UTF_8)) {
        /* Create the root object */
        root.put(Serialization.FORMAT_VERSION_KEY, STATEDUMP_FORMAT_VERSION);
        root.put(Serialization.ID_KEY, ssid);
        root.put(Serialization.STATEDUMP_VERSION_KEY, getVersion());

        /* Create the root state node */
        JSONObject rootNode = new JSONObject();
        rootNode.put(Serialization.CHILDREN_KEY, new JSONObject());
        root.put(Serialization.STATE_KEY, rootNode);

        /* Insert all the paths, types, and values */
        for (int i = 0; i < getAttributes().size(); i++) {
            String[] attribute = getAttributes().get(i);
            StateValue sv = getStates().get(i);

            Serialization.insertFrom(rootNode, attribute, 0, sv);
        }

        bw.write(root.toString(2));

    } catch (JSONException e) {
        /*
         * This should never happen. Any JSON exception means that there's a
         * bug in this code.
         */
        throw new IllegalStateException(e);
    }
}
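
The method above checks whether the directory exists before creating it, and deletes any existing file before recreating it. A minimal JDK-only sketch of the same ensure-directory-then-resolve pattern, where Files.createDirectories and Files.deleteIfExists fold those existence checks into single calls; the directory name and the ".json" suffix are assumptions.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class EnsureDirDemo {
    public static void main(String[] args) throws IOException {
        Path parent = Files.createTempDirectory("statedump-demo");

        // createDirectories is a no-op if the directory already exists,
        // replacing the separate exists()/createDirectory() pair above.
        Path sdPath = Files.createDirectories(parent.resolve("statedump"));

        // Resolve the target file inside the ensured directory.
        Path filePath = sdPath.resolve("ssid" + ".json"); // suffix is an assumption
        Files.deleteIfExists(filePath); // replaces the exists()/delete() pair
        Files.createFile(filePath);
        System.out.println("Created " + filePath);
    }
}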

From source file:com.gitpitch.services.OfflineService.java

private int fetchOnlineMarkdown(PitchParams pp, Path zipRoot) {

    String murl = com.gitpitch.controllers.routes.PitchController
            .markdown(pp.grs, pp.user, pp.repo, pp.branch, pp.pitchme).absoluteURL(isEncrypted(), hostname());

    Path zipMdPath = diskService.ensure(zipRoot.resolve(ZIP_MD_DIR));
    return diskService.download(pp, zipMdPath, murl, PITCHME_ONLINE_MD, grsManager.get(pp).getHeaders());
}

From source file:com.gooddata.dataset.DatasetService.java

/**
 * Loads datasets into the platform. Uploads the given datasets and their manifests to the staging area and triggers an ETL pull.
 * The call is asynchronous, returning {@link com.gooddata.FutureResult} to let the caller wait for results.
 * Uploaded files are deleted from the staging area when finished.
 *
 * @param project  project to which dataset belongs
 * @param datasets dataset manifests to upload
 * @return {@link com.gooddata.FutureResult} of the task, which can throw {@link com.gooddata.dataset.DatasetException}
 * in case the ETL pull task fails
 * @throws com.gooddata.dataset.DatasetException if there is a problem to serialize manifest or upload dataset
 * @see <a href="https://developer.gooddata.com/article/multiload-of-csv-data">batch upload reference</a>
 */
public FutureResult<Void> loadDatasets(final Project project, final Collection<DatasetManifest> datasets) {
    notNull(project, "project");
    validateUploadManifests(datasets);
    final List<String> datasetsNames = new ArrayList<>(datasets.size());
    try {
        final Path dirPath = Paths.get("/", project.getId() + "_" + RandomStringUtils.randomAlphabetic(3), "/");
        for (DatasetManifest datasetManifest : datasets) {
            datasetsNames.add(datasetManifest.getDataSet());
            dataStoreService.upload(dirPath.resolve(datasetManifest.getFile()).toString(),
                    datasetManifest.getSource());
        }

        final String manifestJson = mapper.writeValueAsString(new DatasetManifests(datasets));
        final ByteArrayInputStream inputStream = new ByteArrayInputStream(manifestJson.getBytes(UTF_8));
        dataStoreService.upload(dirPath.resolve(MANIFEST_FILE_NAME).toString(), inputStream);

        return pullLoad(project, dirPath, datasetsNames);
    } catch (IOException e) {
        throw new DatasetException("Unable to serialize manifest", datasetsNames, e);
    } catch (DataStoreException | GoodDataRestException | RestClientException e) {
        throw new DatasetException("Unable to load", datasetsNames, e);
    }
}
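
Note that resolve is purely syntactic and never touches the file system, which is why the method above can use it to compose staging-area upload paths that exist only on the remote data store. A minimal sketch, with a hypothetical staging directory and file names:

import java.nio.file.Path;
import java.nio.file.Paths;

public class StagingPathDemo {
    public static void main(String[] args) {
        // No file system access happens here; the paths stay abstract
        // until something like an upload call actually uses them.
        Path dirPath = Paths.get("/", "project123_abc"); // hypothetical staging dir
        System.out.println(dirPath.resolve("dataset.csv"));      // /project123_abc/dataset.csv
        System.out.println(dirPath.resolve("upload_info.json")); // /project123_abc/upload_info.json
    }
}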

From source file:edu.cornell.mannlib.vitro.webapp.servlet.setup.UpdateKnowledgeBase.java

/**
 * Create the directories where we will report on the update. 
 * Put the paths for the directories and files into the settings object.
 */
private void putReportingPathsIntoSettings(ServletContext ctx, UpdateSettings settings) throws IOException {
    Path homeDir = ApplicationUtils.instance().getHomeDirectory().getPath();

    Path dataDir = createDirectory(homeDir, "upgrade", "knowledgeBase");
    settings.setDataDir(dataDir.toString());
    StartupStatus.getBean(ctx).info(parent, "Updating knowledge base: reports are in '" + dataDir + "'");

    Path changedDir = createDirectory(dataDir, "changedData");
    settings.setAddedDataFile(changedDir.resolve(timestampedFileName("addedData", "n3")).toString());
    settings.setRemovedDataFile(changedDir.resolve(timestampedFileName("removedData", "n3")).toString());

    Path logDir = createDirectory(dataDir, "logs");
    settings.setLogFile(logDir.resolve(timestampedFileName("knowledgeBaseUpdate", "log")).toString());
    settings.setErrorLogFile(
            logDir.resolve(timestampedFileName("knowledgeBaseUpdate.error", "log")).toString());

    Path qualifiedPropertyConfigFile = getFilePath(homeDir, "rdf", "display", "firsttime", "PropertyConfig.n3");
    settings.setQualifiedPropertyConfigFile(qualifiedPropertyConfigFile.toString());
}
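
The timestampedFileName helper used above is not shown in this snippet. A plausible stand-in, to make the resolve calls runnable; the timestamp format is an assumption.

import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

public class TimestampedNameDemo {
    // Hypothetical implementation of the helper used above.
    static String timestampedFileName(String base, String ext) {
        String stamp = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH-mm-ss"));
        return base + "." + stamp + "." + ext;
    }

    public static void main(String[] args) {
        Path logDir = Paths.get("logs");
        System.out.println(logDir.resolve(timestampedFileName("knowledgeBaseUpdate", "log")));
        // e.g. logs/knowledgeBaseUpdate.2024-01-31T12-00-00.log
    }
}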

From source file:de.topobyte.osm4j.extra.extracts.query.Query.java

private void createTemporaryDirectory() throws IOException {
    // Make sure a temporary directory is available

    if (pathTmp == null) {
        pathTmp = Files.createTempDirectory("extract");
    }
    System.out.println("Temporary directory: " + pathTmp);
    Files.createDirectories(pathTmp);
    if (!Files.isDirectory(pathTmp)) {
        System.out.println("Unable to create temporary directory for intermediate files");
        System.exit(1);
    }
    if (pathTmp.toFile().listFiles().length != 0) {
        System.out.println("Temporary directory for intermediate files is not empty");
        System.exit(1);
    }
    System.out.println("Storing intermediate files here: " + pathTmp);

    // Create sub-directories for intermediate files

    Path pathTmpTree = pathTmp.resolve("tree");
    Path pathTmpSimple = pathTmp.resolve("simple-relations");
    Path pathTmpComplex = pathTmp.resolve("complex-relations");

    pathTmpTreeNodes = pathTmpTree.resolve("nodes");
    pathTmpTreeWays = pathTmpTree.resolve("ways");
    pathTmpTreeSimpleRelations = pathTmpTree.resolve("relations.simple");
    pathTmpTreeComplexRelations = pathTmpTree.resolve("relations.complex");
    pathTmpTreeAdditionalNodes = pathTmpTree.resolve("nodes-extra");
    pathTmpTreeAdditionalWays = pathTmpTree.resolve("ways-extra");

    pathTmpSimpleNodes = pathTmpSimple.resolve("nodes");
    pathTmpSimpleWays = pathTmpSimple.resolve("ways");
    pathTmpSimpleRelations = pathTmpSimple.resolve("relations");
    pathTmpComplexNodes = pathTmpComplex.resolve("nodes");
    pathTmpComplexWays = pathTmpComplex.resolve("ways");
    pathTmpComplexRelations = pathTmpComplex.resolve("relations");

    Files.createDirectory(pathTmpTree);
    Files.createDirectory(pathTmpSimple);
    Files.createDirectory(pathTmpComplex);

    Files.createDirectory(pathTmpTreeNodes);
    Files.createDirectory(pathTmpTreeWays);
    Files.createDirectory(pathTmpTreeSimpleRelations);
    Files.createDirectory(pathTmpTreeComplexRelations);
    Files.createDirectory(pathTmpTreeAdditionalNodes);
    Files.createDirectory(pathTmpTreeAdditionalWays);

    Files.createDirectory(pathTmpSimpleNodes);
    Files.createDirectory(pathTmpSimpleWays);
    Files.createDirectory(pathTmpSimpleRelations);
    Files.createDirectory(pathTmpComplexNodes);
    Files.createDirectory(pathTmpComplexWays);
    Files.createDirectory(pathTmpComplexRelations);
}
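
Because each level of the tree is resolved and created separately, the method needs one createDirectory call per level. A minimal sketch showing how Files.createDirectories, which creates any missing parents, can set up the same kind of tree with a single call per leaf; the sub-directory names follow the snippet above.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class TempTreeDemo {
    public static void main(String[] args) throws IOException {
        Path tmp = Files.createTempDirectory("extract");

        // createDirectories creates missing intermediate directories,
        // so "tree" and "simple-relations" need no explicit calls.
        for (String sub : new String[] { "tree/nodes", "tree/ways",
                "simple-relations/nodes", "complex-relations/nodes" }) {
            Files.createDirectories(tmp.resolve(sub));
        }
        System.out.println("Tree created under " + tmp);
    }
}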

From source file:eu.eubrazilcc.lvl.service.io.ImportPublicationsTask.java

private Callable<Integer> importPubMedSubTask(final List<String> ids, final EntrezHelper entrez,
        final File tmpDir, final Format format, final String extension) {
    return new Callable<Integer>() {
        private int efetchCount = 0;

        @Override
        public Integer call() throws Exception {
            setStatus("Finding missing publications between PubMed and the local collection");
            // filter out the publications that are already stored in the database, creating a new set
            // with the identifiers that are missing from the database. Using a set ensures that 
            // duplicate identifiers are also removed from the original list
            final List<String> ids2 = from(ids).transform(new Function<String, String>() {
                @Override
                public String apply(final String id) {
                    String result = id;
                    for (int i = 0; i < filters.size() && result != null; i++) {
                        final RecordFilter filter = filters.get(i);
                        if (filter.canBeApplied(PUBMED)) {
                            result = filters.get(i).filterById(id);
                        }
                    }
                    return result;
                }
            }).filter(notNull()).toSet().asList();
            if (ids2.size() > 0) {
                setStatus("Fetching publications from PubMed");
                // update progress
                int pendingCount = pending.addAndGet(ids2.size());
                setProgress(100.0d * fetched.get() / pendingCount);
                // fetch sequence files
                final Path tmpDir2 = createTempDirectory(tmpDir.toPath(), "fetch_pub_task_");
                entrez.efetch(ids2, 0, MAX_RECORDS_FETCHED, tmpDir2.toFile(), format);
                // import publication files to the database
                for (final String id : ids2) {
                    setStatus("Importing PubMed publications into local collection");
                    final Path source = tmpDir2.resolve(id + "." + extension);
                    try {
                        // insert publication in the database
                        final PubmedArticle pmArticle = PUBMED_XMLB.typeFromFile(source.toFile());
                        final Reference reference = parseArticle(pmArticle);
                        REFERENCE_DAO.insert(reference, true);
                        efetchCount++;
                        LOGGER.info("New PubMed file stored: " + source.toString());
                        // update progress
                        int fetchedCount = fetched.incrementAndGet();
                        setProgress(100.0d * fetchedCount / pending.get());
                    } catch (Exception e) {
                        LOGGER.warn("Failed to import publication from file: " + source.getFileName(), e);
                    }
                }
            }
            checkState(ids2.size() == efetchCount, "Not all publications were imported");
            return efetchCount;
        }
    };
}

From source file:com.boundlessgeo.geoserver.bundle.BundleExporterTest.java

@Test
public void testInjestIntoGeopkg() throws Exception {
    new CatalogCreator(cat).workspace("foo").database("bar")
            .featureType("stuff", "geom:Point:srid=4326,name:String,id:Integer", stuff()).layer().store()
            .featureType("widgets", "geom:Point:srid=4326,name:String,id:Integer", widgets()).layer();

    exporter = new BundleExporter(cat, new ExportOpts(cat.getWorkspaceByName("foo")));
    Path root = exporter.run();

    assertPathExists(root, "workspace.xml");
    assertPathExists(root, "namespace.xml");
    assertPathExists(root, "bar/datastore.xml");
    assertPathExists(root, "bar/stuff/featuretype.xml");
    assertPathExists(root, "bar/stuff/layer.xml");
    assertPathExists(root, "bar/widgets/featuretype.xml");
    assertPathExists(root, "bar/widgets/layer.xml");
    assertPathExists(root, "data/bar.gpkg");

    // ensure the geopackage has the right data in it
    GeoPackage gpkg = new GeoPackage(root.resolve("data/bar.gpkg").toFile());
    try {
        assertEquals(2, gpkg.features().size());
        assertNotNull(gpkg.feature("stuff"));
        assertNotNull(gpkg.feature("widgets"));
    } finally {
        gpkg.close();
    }

    // ensure the exported store config points to the geopackage
    DataStoreInfo store = new XStreamPersisterFactory().createXMLPersister()
            .load(new FileInputStream(root.resolve("bar/datastore.xml").toFile()), DataStoreInfo.class);

    assertEquals("geopkg", store.getConnectionParameters().get("dbtype"));
    assertEquals("file:%WORKSPACE%/data" + File.separator + "bar.gpkg",
            store.getConnectionParameters().get("database"));
}