Example usage for java.nio.file Path toAbsolutePath

List of usage examples for java.nio.file Path toAbsolutePath

Introduction

This page presents usage examples for java.nio.file.Path#toAbsolutePath().

Prototype

Path toAbsolutePath();

Document

Returns a Path object representing the absolute path of this path.
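
A minimal, self-contained sketch of typical usage (the file names below are illustrative):

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class ToAbsolutePathExample {
    public static void main(String[] args) throws Exception {
        // A relative path is resolved against the current working directory;
        // toAbsolutePath() makes the full location explicit.
        Path relative = Paths.get("notes.txt");
        System.out.println(relative.toAbsolutePath());

        // A path that is already absolute, such as one returned by
        // Files.createTempFile, is returned unchanged.
        Path temp = Files.createTempFile("demo", ".txt");
        System.out.println(temp.toAbsolutePath());
        Files.delete(temp);
    }
}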

Usage

From source file:com.spectralogic.ds3client.integration.Smoke_Test.java
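An integration test that reads a 200-byte range spanning a chunk boundary; here toAbsolutePath() simply reports the full location of the temporary test file in a log message.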

@Test
public void partialObjectGetOverChunkBoundry() throws IOException, XmlProcessingException {
    final String bucketName = "partialGetOverBoundry";
    final String testFile = "testObject.txt";
    final Path filePath = Files.createTempFile("ds3", testFile);
    final int seed = 12345;
    LOG.info("Test file: " + filePath.toAbsolutePath());
    try {
        HELPERS.ensureBucketExists(bucketName, envDataPolicyId);

        final int objectSize = PutBulkJobSpectraS3Request.MIN_UPLOAD_SIZE_IN_BYTES * 2;

        final List<Ds3Object> objs = Lists.newArrayList(new Ds3Object(testFile, objectSize));

        final Ds3ClientHelpers.Job putJob = HELPERS.startWriteJob(bucketName, objs, WriteJobOptions.create()
                .withMaxUploadSize(PutBulkJobSpectraS3Request.MIN_UPLOAD_SIZE_IN_BYTES));

        putJob.transfer(new Ds3ClientHelpers.ObjectChannelBuilder() {
            @Override
            public SeekableByteChannel buildChannel(final String key) throws IOException {
                final byte[] randomData = IOUtils.toByteArray(new RandomDataInputStream(seed, objectSize));
                final ByteBuffer randomBuffer = ByteBuffer.wrap(randomData);

                final ByteArraySeekableByteChannel channel = new ByteArraySeekableByteChannel(objectSize);
                channel.write(randomBuffer);

                return channel;

            }
        });

        final List<Ds3Object> partialObjectGet = Lists.newArrayList();
        partialObjectGet.add(new PartialDs3Object(testFile,
                Range.byPosition(PutBulkJobSpectraS3Request.MIN_UPLOAD_SIZE_IN_BYTES - 100,
                        PutBulkJobSpectraS3Request.MIN_UPLOAD_SIZE_IN_BYTES + 99)));

        final Ds3ClientHelpers.Job getJob = HELPERS.startReadJob(bucketName, partialObjectGet);

        getJob.transfer(new Ds3ClientHelpers.ObjectChannelBuilder() {
            @Override
            public SeekableByteChannel buildChannel(final String key) throws IOException {
                return Files.newByteChannel(filePath, StandardOpenOption.WRITE, StandardOpenOption.CREATE);
            }
        });

        assertThat(Files.size(filePath), is(200L));

    } finally {
        Files.delete(filePath);
        deleteAllContents(client, bucketName);
    }
}

From source file:org.tinymediamanager.core.movie.tasks.MovieUpdateDatasourceTask2.java
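While scanning a movie folder, this method stores each entry via path.toAbsolutePath() so later parsing works with absolute file and directory paths.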

private void parseMovieDirectory(Path movieDir, Path dataSource) {
    List<Path> movieDirList = listFilesAndDirs(movieDir);
    ArrayList<Path> files = new ArrayList<>();
    ArrayList<Path> dirs = new ArrayList<>(); // FIXME: what for....?
    HashSet<String> normalizedVideoFiles = new HashSet<>(); // just for
                                                            // identifying MMD

    boolean isDiscFolder = false;
    boolean isMultiMovieDir = false;
    boolean videoFileFound = false;
    Path movieRoot = movieDir; // root set to current dir - might be adjusted by
                               // disc folders

    for (Path path : movieDirList) {
        if (Utils.isRegularFile(path)) {
            files.add(path.toAbsolutePath());

            // do not construct a fully MF yet
            // just minimal to get the type out of filename
            MediaFile mf = new MediaFile();
            mf.setPath(path.getParent().toString());
            mf.setFilename(path.getFileName().toString());
            mf.setType(mf.parseType());

            // System.out.println("************ " + mf);
            if (mf.getType() == MediaFileType.VIDEO) {
                videoFileFound = true;
                if (mf.isDiscFile()) {
                    isDiscFolder = true;
                    break; // step out - this is all we need to know
                } else {
                    // detect unique basename, without stacking etc
                    String[] ty = ParserUtils.detectCleanMovienameAndYear(
                            FilenameUtils.getBaseName(Utils.cleanStackingMarkers(mf.getFilename())));
                    normalizedVideoFiles.add(ty[0] + ty[1]);
                }
            }
        } else if (Files.isDirectory(path)) {
            dirs.add(path.toAbsolutePath());
        }
    }

    if (!videoFileFound) {
        // hmm... we never found a video file (but maybe others, trailers) so NO
        // need to parse THIS folder
        return;
    }

    if (isDiscFolder) {
        // if inside own DiscFolder, walk backwards till movieRoot folder
        Path relative = dataSource.relativize(movieDir);
        while (relative.toString().toUpperCase(Locale.ROOT).contains("VIDEO_TS")
                || relative.toString().toUpperCase(Locale.ROOT).contains("BDMV")) {
            movieDir = movieDir.getParent();
            relative = dataSource.relativize(movieDir);
        }
        movieRoot = movieDir;
    } else {
        // no VIDEO files in this dir - skip this folder
        if (normalizedVideoFiles.size() == 0) {
            return;
        }
        // more than one (unstacked) movie file in directory (or DS root) -> must
        // be parsed as multiMovieDir
        if (normalizedVideoFiles.size() > 1 || movieDir.equals(dataSource)) {
            isMultiMovieDir = true;
        }
    }

    if (cancel) {
        return;
    }
    // ok, we're ready to parse :)
    if (isMultiMovieDir) {
        createMultiMovieFromDir(dataSource, movieRoot, files);
    } else {
        createSingleMovieFromDir(dataSource, movieRoot, isDiscFolder);
    }

}

From source file:org.apache.openaz.xacml.std.pap.StdEngine.java
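toAbsolutePath() is used here only for diagnostics: when the XACML properties file cannot be read, the error log records its full path.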

private void loadGroups() throws PAPException {
    //
    // Create a properties object
    //
    Properties properties = new Properties();
    Path file = Paths.get(this.repository.toString(), XACMLProperties.XACML_PROPERTIES_NAME);
    try {
        //
        // Load the properties
        //
        try (InputStream is = new FileInputStream(file.toFile())) {
            properties.load(is);
        }

        //
        // Parse it
        //
        this.groups = this.readProperties(this.repository, properties);
    } catch (IOException e) {
        logger.error("Failed to load " + file.toAbsolutePath().toString());
        this.groups = new HashSet<StdPDPGroup>();
    }
    //
    // Initialize the default group
    //
    PDPGroup defaultGroup = this.initializeDefaultGroup(file, properties);
    logger.info("Default group is: " + defaultGroup.getId() + "=" + defaultGroup.getName());
}

From source file:com.vmware.photon.controller.deployer.xenon.task.CreateDhcpVmTaskService.java
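This task generates cloud-init style config files in a temporary directory; toAbsolutePath() converts the directory and the generated files into absolute path strings passed as arguments to an external ISO-building script.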

private void processConfigIso(State currentState, VmService.State vmState, HostService.State hostState)
        throws Throwable {

    checkState(hostState.metadata.containsKey(HostService.State.METADATA_KEY_NAME_MANAGEMENT_NETWORK_GATEWAY));
    checkState(hostState.metadata.containsKey(HostService.State.METADATA_KEY_NAME_MANAGEMENT_NETWORK_IP));
    checkState(hostState.metadata.containsKey(HostService.State.METADATA_KEY_NAME_MANAGEMENT_NETWORK_NETMASK));
    checkState(
            hostState.metadata.containsKey(HostService.State.METADATA_KEY_NAME_MANAGEMENT_NETWORK_DNS_SERVER));

    String gateway = hostState.metadata.get(HostService.State.METADATA_KEY_NAME_MANAGEMENT_NETWORK_GATEWAY);
    String ipAddress = hostState.metadata.get(HostService.State.METADATA_KEY_NAME_MANAGEMENT_NETWORK_IP);
    String netmask = hostState.metadata.get(HostService.State.METADATA_KEY_NAME_MANAGEMENT_NETWORK_NETMASK);
    String dnsEndpointList = hostState.metadata
            .get(HostService.State.METADATA_KEY_NAME_MANAGEMENT_NETWORK_DNS_SERVER);
    if (!Strings.isNullOrEmpty(dnsEndpointList)) {
        dnsEndpointList = Stream.of(dnsEndpointList.split(",")).map((dnsServer) -> "DNS=" + dnsServer + "\n")
                .collect(StringBuilder::new, StringBuilder::append, StringBuilder::append).toString();
    }

    DeployerContext deployerContext = HostUtils.getDeployerContext(this);
    String scriptDirectory = deployerContext.getScriptDirectory();

    String userDataConfigFileContent = new String(
            Files.readAllBytes(Paths.get(scriptDirectory, "user-data.template")), StandardCharsets.UTF_8)
                    .replace("$GATEWAY", gateway)
                    .replace("$ADDRESS", new SubnetUtils(ipAddress, netmask).getInfo().getCidrSignature())
                    .replace("$DNS", dnsEndpointList);

    if (currentState.ntpEndpoint != null) {
        userDataConfigFileContent = userDataConfigFileContent.replace("$NTP", currentState.ntpEndpoint);
    }

    String metadataConfigFileContent = new String(
            Files.readAllBytes(Paths.get(scriptDirectory, "meta-data.template")), StandardCharsets.UTF_8)
                    .replace("$INSTANCE_ID", vmState.name).replace("$LOCAL_HOSTNAME", vmState.name);

    Path vmConfigDirectoryPath = Files.createTempDirectory("iso-" + currentState.vmId).toAbsolutePath();
    Path userDataConfigFilePath = vmConfigDirectoryPath.resolve("user-data.yml");
    Files.write(userDataConfigFilePath, userDataConfigFileContent.getBytes(StandardCharsets.UTF_8));
    Path metadataConfigFilePath = vmConfigDirectoryPath.resolve("meta-data.yml");
    Files.write(metadataConfigFilePath, metadataConfigFileContent.getBytes(StandardCharsets.UTF_8));
    Path isoFilePath = vmConfigDirectoryPath.resolve("config.iso");

    List<String> command = new ArrayList<>();
    command.add("./" + SCRIPT_NAME);
    command.add(isoFilePath.toAbsolutePath().toString());
    command.add(userDataConfigFilePath.toAbsolutePath().toString());
    command.add(metadataConfigFilePath.toAbsolutePath().toString());
    command.add(currentState.serviceConfigDirectory);

    File scriptLogFile = new File(deployerContext.getScriptLogDirectory(), SCRIPT_NAME + "-" + vmState.vmId
            + "-" + ServiceUtils.getIDFromDocumentSelfLink(currentState.documentSelfLink) + ".log");

    ScriptRunner scriptRunner = new ScriptRunner.Builder(command, deployerContext.getScriptTimeoutSec())
            .directory(deployerContext.getScriptDirectory())
            .redirectOutput(ProcessBuilder.Redirect.to(scriptLogFile)).build();

    ListenableFutureTask<Integer> futureTask = ListenableFutureTask.create(scriptRunner);
    HostUtils.getListeningExecutorService(this).submit(futureTask);
    Futures.addCallback(futureTask, new FutureCallback<Integer>() {
        @Override
        public void onSuccess(@javax.validation.constraints.NotNull Integer result) {
            try {
                if (result != 0) {
                    logScriptErrorAndFail(currentState, result, scriptLogFile);
                } else {
                    State patchState = buildPatch(TaskState.TaskStage.STARTED, TaskState.SubStage.ATTACH_ISO,
                            null);
                    patchState.vmConfigDirectory = vmConfigDirectoryPath.toAbsolutePath().toString();
                    TaskUtils.sendSelfPatch(CreateDhcpVmTaskService.this, patchState);
                }
            } catch (Throwable t) {
                failTask(t);
            }
        }

        @Override
        public void onFailure(Throwable throwable) {
            failTask(throwable);
        }
    });
}

From source file:org.fao.geonet.kernel.harvest.harvester.localfilesystem.LocalFsHarvesterFileVisitor.java
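A FileVisitor that harvests metadata records from disk; file.toAbsolutePath().normalize() yields the canonical path used in log messages and when extracting the record UUID.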

@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
    if (cancelMonitor.get()) {
        return FileVisitResult.TERMINATE;
    }

    try {
        if (file != null && file.getFileName() != null && file.getFileName().toString() != null
                && (file.getFileName().toString().endsWith(".xml")
                        || MEFLib.isValidArchiveExtensionForMEF(file.getFileName().toString()))) {

            result.totalMetadata++;

            if (LOGGER.isDebugEnabled() && result.totalMetadata % 1000 == 0) {
                long elapsedTime = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - startTime);
                LOGGER.debug(result.totalMetadata + " records inserted in " + elapsedTime + " s ("
                        + result.totalMetadata / Math.max(elapsedTime, 1) + " records/s).");
            }

            Path filePath = file.toAbsolutePath().normalize();
            if (MEFLib.isValidArchiveExtensionForMEF(file.getFileName().toString())) {
                processMef(file, filePath);
                return FileVisitResult.CONTINUE;
            }

            Element xml;
            try {
                LOGGER.debug("reading file: " + filePath);
                xml = Xml.loadFile(file);
            } catch (JDOMException e) { // JDOM problem
                LOGGER.debug("Error loading XML from file " + filePath + ", ignoring");
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.error(e);
                }
                result.badFormat++;
                return FileVisitResult.CONTINUE; // skip this one
            } catch (Throwable e) { // some other error
                LOGGER.debug("Error retrieving XML from file  " + filePath + ", ignoring");
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.error(e);
                }
                result.unretrievable++;
                return FileVisitResult.CONTINUE; // skip this one
            }

            // transform using importxslt if not none
            if (transformIt) {
                try {
                    xml = Xml.transform(xml, thisXslt);
                } catch (Exception e) {
                    LOGGER.debug("Cannot transform XML from file " + filePath + ", ignoring. Error was: "
                            + e.getMessage());
                    result.badFormat++;
                    return FileVisitResult.CONTINUE; // skip this one
                }
            }

            String schema = null;
            try {
                schema = dataMan.autodetectSchema(xml, null);
            } catch (Exception e) {
                result.unknownSchema++;
            }
            if (schema == null) {
                return FileVisitResult.CONTINUE;
            }

            try {
                params.getValidate().validate(dataMan, context, xml);
            } catch (Exception e) {
                LOGGER.debug("Cannot validate XML from file " + filePath + ", ignoring. Error was: "
                        + e.getMessage());
                result.doesNotValidate++;
                return FileVisitResult.CONTINUE; // skip this one
            }

            String uuid = getUuidFromFile(xml, filePath, schema);
            if (uuid == null || uuid.equals("")) {
                result.badFormat++;
                return FileVisitResult.CONTINUE;
            }

            String id = dataMan.getMetadataId(uuid);
            String changeDate = new ISODate(System.currentTimeMillis(), false).getDateAndTime();
            if (id == null) {
                // For new record change date will be the time of metadata xml date change or the date when
                // the record was harvested (if can't be obtained the metadata xml date change)
                String createDate;
                // or the last modified date of the file
                if (params.checkFileLastModifiedForUpdate) {
                    createDate = new ISODate(Files.getLastModifiedTime(file).toMillis(), false)
                            .getDateAndTime();
                } else {
                    try {
                        createDate = dataMan.extractDateModified(schema, xml);
                    } catch (Exception ex) {
                        LOGGER.error(
                                "LocalFilesystemHarvester - addMetadata - can't get metadata modified date for metadata uuid= "
                                        + uuid + " using current date for modified date");
                        createDate = new ISODate().toString();
                    }
                }

                LOGGER.debug("adding new metadata");
                id = addMetadata(xml, schema, uuid, createDate);
            } else {
                // Check last modified date of the file with the record change date
                // to check if an update is required
                if (params.checkFileLastModifiedForUpdate) {
                    Date fileDate = new Date(Files.getLastModifiedTime(file).toMillis());

                    final AbstractMetadata metadata = repo.findOne(id);
                    ISODate modified = new ISODate();
                    if (metadata != null && metadata.getDataInfo() != null) {
                        modified = metadata.getDataInfo().getChangeDate();
                    }

                    Date recordDate = modified.toDate();

                    changeDate = new ISODate(fileDate.getTime(), false).getDateAndTime();

                    LOGGER.debug(" File date is: " + filePath + "filePath / record date is: " + modified);

                    if (DateUtils.truncate(recordDate, Calendar.SECOND)
                            .before(DateUtils.truncate(fileDate, Calendar.SECOND))) {
                        LOGGER.debug("  Db record is older than file. Updating record with id: " + id);
                        updateMedata(xml, id, changeDate);
                    } else {
                        LOGGER.debug(
                                "  Db record is not older than last modified date of file. No need for update.");
                        result.unchangedMetadata++;
                    }
                } else {
                    id = dataMan.getMetadataId(uuid);
                    if (id == null) {
                        // For new record change date will be the time of metadata xml date change or the date when
                        // the record was harvested (if can't be obtained the metadata xml date change)
                        String createDate;
                        // or the last modified date of the file
                        if (params.checkFileLastModifiedForUpdate) {
                            createDate = new ISODate(Files.getLastModifiedTime(file).toMillis(), false)
                                    .getDateAndTime();
                        } else {
                            try {
                                createDate = dataMan.extractDateModified(schema, xml);
                            } catch (Exception ex) {
                                LOGGER.error(
                                        "LocalFilesystemHarvester - addMetadata - can't get metadata modified date for metadata uuid= "
                                                + uuid + ", using current date for modified date");
                                createDate = new ISODate().toString();
                            }
                        }

                        LOGGER.debug("adding new metadata");
                        id = harvester.addMetadata(xml, uuid, schema, localGroups, localCateg, createDate,
                                aligner, false);
                        listOfRecordsToIndex.add(Integer.valueOf(id));
                        result.addedMetadata++;
                    } else {
                        // Check last modified date of the file with the record change date
                        // to check if an update is required
                        if (params.checkFileLastModifiedForUpdate) {
                            Date fileDate = new Date(Files.getLastModifiedTime(file).toMillis());

                            final AbstractMetadata metadata = repo.findOne(id);
                            final ISODate modified;
                            if (metadata != null && metadata.getDataInfo() != null) {
                                modified = metadata.getDataInfo().getChangeDate();
                            } else {
                                modified = new ISODate();
                            }

                            Date recordDate = modified.toDate();

                            changeDate = new ISODate(fileDate.getTime(), false).getDateAndTime();

                            LOGGER.debug(
                                    " File date is: " + fileDate.toString() + " / record date is: " + modified);

                            if (DateUtils.truncate(recordDate, Calendar.SECOND)
                                    .before(DateUtils.truncate(fileDate, Calendar.SECOND))) {
                                LOGGER.debug("  Db record is older than file. Updating record with id: " + id);
                                harvester.updateMetadata(xml, id, localGroups, localCateg, changeDate, aligner);
                                listOfRecordsToIndex.add(Integer.valueOf(id));
                                result.updatedMetadata++;
                            } else {
                                LOGGER.debug(
                                        "  Db record is not older than last modified date of file. No need for update.");
                                result.unchangedMetadata++;
                            }
                        } else {
                            LOGGER.debug("  updating existing metadata, id is: " + id);

                            try {
                                changeDate = dataMan.extractDateModified(schema, xml);
                            } catch (Exception ex) {
                                LOGGER.error(
                                        "LocalFilesystemHarvester - updateMetadata - can't get metadata modified date for "
                                                + "metadata id= " + id
                                                + ", using current date for modified date");
                                changeDate = new ISODate().toString();
                            }

                            harvester.updateMetadata(xml, id, localGroups, localCateg, changeDate, aligner);
                            listOfRecordsToIndex.add(Integer.valueOf(id));
                            result.updatedMetadata++;
                        }
                    }

                    updateMedata(xml, id, changeDate);
                }
            }
            listOfRecords.add(Integer.valueOf(id));
        }
    } catch (Throwable e) {
        LOGGER.error("An error occurred while harvesting a local file:{}. Error is: " + e.getMessage());
    }
    return FileVisitResult.CONTINUE;
}

From source file:org.commonwl.view.cwl.CWLService.java
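Here toAbsolutePath().toUri() turns the workflow file and work tree into file: URIs, so that cwltool's RDF output can be rewritten to stable permalink URLs.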

/**
 * Create a workflow model using cwltool rdf output
 * @param basicModel The basic workflow object created thus far
 * @param workflowFile The workflow file to run cwltool on
 * @return The constructed workflow object
 */
public Workflow parseWorkflowWithCwltool(Workflow basicModel, Path workflowFile, Path workTree)
        throws CWLValidationException {
    GitDetails gitDetails = basicModel.getRetrievedFrom();
    String latestCommit = basicModel.getLastCommit();
    String packedWorkflowID = gitDetails.getPackedId();

    // Get paths to workflow
    String url = basicModel.getIdentifier();
    String workflowFileURI = workflowFile.toAbsolutePath().toUri().toString();
    URI workTreeUri = workTree.toAbsolutePath().toUri();
    String localPath = workflowFileURI;
    String gitPath = gitDetails.getPath();
    if (packedWorkflowID != null) {
        if (packedWorkflowID.charAt(0) != '#') {
            localPath += "#";
            gitPath += "#";
        }
        localPath += packedWorkflowID;
        gitPath += packedWorkflowID;
    }

    // Get RDF representation from cwltool
    if (!rdfService.graphExists(url)) {
        String rdf = cwlTool.getRDF(localPath);
        // Replace /tmp/123123 with permalink base 
        // NOTE: We do not just replace workflowFileURI, all referenced files will also get rewritten
        rdf = rdf.replace(workTreeUri.toString(), "https://w3id.org/cwl/view/git/" + latestCommit + "/");
        // Workaround for common-workflow-language/cwltool#427
        rdf = rdf.replace("<rdfs:>", "<http://www.w3.org/2000/01/rdf-schema#>");

        // Create a workflow model from RDF representation
        Model model = ModelFactory.createDefaultModel();
        model.read(new ByteArrayInputStream(rdf.getBytes()), null, "TURTLE");

        // Store the model
        rdfService.storeModel(url, model);
    }

    // Base workflow details
    String label = FilenameUtils.getName(url);
    String doc = null;
    ResultSet labelAndDoc = rdfService.getLabelAndDoc(url);
    if (labelAndDoc.hasNext()) {
        QuerySolution labelAndDocSoln = labelAndDoc.nextSolution();
        if (labelAndDocSoln.contains("label")) {
            label = labelAndDocSoln.get("label").toString();
        }
        if (labelAndDocSoln.contains("doc")) {
            doc = labelAndDocSoln.get("doc").toString();
        }
    }

    // Inputs
    Map<String, CWLElement> wfInputs = new HashMap<>();
    ResultSet inputs = rdfService.getInputs(url);
    while (inputs.hasNext()) {
        QuerySolution input = inputs.nextSolution();
        String inputName = rdfService.stepNameFromURI(gitPath, input.get("name").toString());

        CWLElement wfInput = new CWLElement();
        if (input.contains("type")) {
            String type;
            if (input.get("type").toString().equals("https://w3id.org/cwl/salad#array")) {
                type = typeURIToString(input.get("items").toString()) + "[]";
            } else {
                type = typeURIToString(input.get("type").toString());
            }
            if (input.contains("null")) {
                type += " (Optional)";
            }
            wfInput.setType(type);
        }
        if (input.contains("format")) {
            String format = input.get("format").toString();
            setFormat(wfInput, format);
        }
        if (input.contains("label")) {
            wfInput.setLabel(input.get("label").toString());
        }
        if (input.contains("doc")) {
            wfInput.setDoc(input.get("doc").toString());
        }
        wfInputs.put(rdfService.labelFromName(inputName), wfInput);
    }

    // Outputs
    Map<String, CWLElement> wfOutputs = new HashMap<>();
    ResultSet outputs = rdfService.getOutputs(url);
    while (outputs.hasNext()) {
        QuerySolution output = outputs.nextSolution();
        CWLElement wfOutput = new CWLElement();

        String outputName = rdfService.stepNameFromURI(gitPath, output.get("name").toString());
        if (output.contains("type")) {
            String type;
            if (output.get("type").toString().equals("https://w3id.org/cwl/salad#array")) {
                type = typeURIToString(output.get("items").toString()) + "[]";
            } else {
                type = typeURIToString(output.get("type").toString());
            }
            if (output.contains("null")) {
                type += " (Optional)";
            }
            wfOutput.setType(type);
        }

        if (output.contains("src")) {
            wfOutput.addSourceID(rdfService.stepNameFromURI(gitPath, output.get("src").toString()));
        }
        if (output.contains("format")) {
            String format = output.get("format").toString();
            setFormat(wfOutput, format);
        }
        if (output.contains("label")) {
            wfOutput.setLabel(output.get("label").toString());
        }
        if (output.contains("doc")) {
            wfOutput.setDoc(output.get("doc").toString());
        }
        wfOutputs.put(rdfService.labelFromName(outputName), wfOutput);
    }

    // Steps
    Map<String, CWLStep> wfSteps = new HashMap<>();
    ResultSet steps = rdfService.getSteps(url);
    while (steps.hasNext()) {
        QuerySolution step = steps.nextSolution();
        String uri = rdfService.stepNameFromURI(gitPath, step.get("step").toString());
        if (wfSteps.containsKey(uri)) {
            // Already got step details, add extra source ID
            if (step.contains("src")) {
                CWLElement src = new CWLElement();
                src.addSourceID(rdfService.stepNameFromURI(gitPath, step.get("src").toString()));
                wfSteps.get(uri).getSources().put(step.get("stepinput").toString(), src);
            } else if (step.contains("default")) {
                CWLElement src = new CWLElement();
                src.setDefaultVal(rdfService.formatDefault(step.get("default").toString()));
                wfSteps.get(uri).getSources().put(step.get("stepinput").toString(), src);
            }
        } else {
            // Add new step
            CWLStep wfStep = new CWLStep();

            IRI workflowPath = iriFactory.construct(url).resolve("./");
            IRI runPath = iriFactory.construct(step.get("run").asResource().getURI());
            wfStep.setRun(workflowPath.relativize(runPath).toString());
            wfStep.setRunType(rdfService.strToRuntype(step.get("runtype").toString()));

            if (step.contains("src")) {
                CWLElement src = new CWLElement();
                src.addSourceID(rdfService.stepNameFromURI(gitPath, step.get("src").toString()));
                Map<String, CWLElement> srcList = new HashMap<>();
                srcList.put(rdfService.stepNameFromURI(gitPath, step.get("stepinput").toString()), src);
                wfStep.setSources(srcList);
            } else if (step.contains("default")) {
                CWLElement src = new CWLElement();
                src.setDefaultVal(rdfService.formatDefault(step.get("default").toString()));
                Map<String, CWLElement> srcList = new HashMap<>();
                srcList.put(rdfService.stepNameFromURI(gitPath, step.get("stepinput").toString()), src);
                wfStep.setSources(srcList);
            }
            if (step.contains("label")) {
                wfStep.setLabel(step.get("label").toString());
            }
            if (step.contains("doc")) {
                wfStep.setDoc(step.get("doc").toString());
            }
            wfSteps.put(rdfService.labelFromName(uri), wfStep);
        }
    }
    // Try to determine license
    ResultSet licenseResult = rdfService.getLicense(url);
    String licenseLink = null;
    if (licenseResult.hasNext()) {
        licenseLink = licenseResult.next().get("license").toString();
    } else {
        // Check for "LICENSE"-like files in root of git repo
        for (String licenseCandidate : new String[] { "LICENSE", "LICENSE.txt", "LICENSE.md" }) {
            // FIXME: This might wrongly match lower-case "license.txt" in case-insensitive file systems
            // but the URL would not work
            if (Files.isRegularFile(workTree.resolve(licenseCandidate))) {
                // Link to it by raw URL
                licenseLink = basicModel.getRetrievedFrom().getRawUrl(null, licenseCandidate);
            }
        }
    }

    // Docker link
    ResultSet dockerResult = rdfService.getDockerLink(url);
    String dockerLink = null;
    if (dockerResult.hasNext()) {
        QuerySolution docker = dockerResult.nextSolution();
        if (docker.contains("pull")) {
            dockerLink = DockerService.getDockerHubURL(docker.get("pull").toString());
        } else {
            dockerLink = "true";
        }
    }

    // Create workflow model
    Workflow workflowModel = new Workflow(label, doc, wfInputs, wfOutputs, wfSteps, dockerLink, licenseLink);

    // Generate DOT graph
    StringWriter graphWriter = new StringWriter();
    RDFDotWriter RDFDotWriter = new RDFDotWriter(graphWriter, rdfService, gitPath);
    try {
        RDFDotWriter.writeGraph(url);
        workflowModel.setVisualisationDot(graphWriter.toString());
    } catch (IOException ex) {
        logger.error("Failed to create DOT graph for workflow: " + ex.getMessage());
    }

    return workflowModel;

}

From source file:org.apache.geode.management.internal.cli.commands.ExportLogsCommand.java
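The gfsh export-logs command gathers zipped logs from each member; toAbsolutePath() provides absolute paths when unzipping the per-member archives and when naming the final exported zip.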

@CliCommand(value = CliStrings.EXPORT_LOGS, help = CliStrings.EXPORT_LOGS__HELP)
@CliMetaData(isFileDownloadOverHttp = true, interceptor = "org.apache.geode.management.internal.cli.commands.ExportLogsInterceptor", relatedTopic = {
        CliStrings.TOPIC_GEODE_SERVER, CliStrings.TOPIC_GEODE_DEBUG_UTIL })
@ResourceOperation(resource = ResourcePermission.Resource.CLUSTER, operation = ResourcePermission.Operation.READ)
public Result exportLogs(
        @CliOption(key = CliStrings.EXPORT_LOGS__DIR, help = CliStrings.EXPORT_LOGS__DIR__HELP) String dirName,
        @CliOption(key = { CliStrings.GROUP,
                CliStrings.GROUPS }, optionContext = ConverterHint.MEMBERGROUP, help = CliStrings.EXPORT_LOGS__GROUP__HELP) String[] groups,
        @CliOption(key = { CliStrings.MEMBER,
                CliStrings.MEMBERS }, optionContext = ConverterHint.ALL_MEMBER_IDNAME, help = CliStrings.EXPORT_LOGS__MEMBER__HELP) String[] memberIds,
        @CliOption(key = CliStrings.EXPORT_LOGS__LOGLEVEL, unspecifiedDefaultValue = DEFAULT_EXPORT_LOG_LEVEL, optionContext = ConverterHint.LOG_LEVEL, help = CliStrings.EXPORT_LOGS__LOGLEVEL__HELP) String logLevel,
        @CliOption(key = CliStrings.EXPORT_LOGS__UPTO_LOGLEVEL, unspecifiedDefaultValue = "false", help = CliStrings.EXPORT_LOGS__UPTO_LOGLEVEL__HELP) boolean onlyLogLevel,
        @CliOption(key = CliStrings.EXPORT_LOGS__MERGELOG, unspecifiedDefaultValue = "false", help = CliStrings.EXPORT_LOGS__MERGELOG__HELP) boolean mergeLog,
        @CliOption(key = CliStrings.EXPORT_LOGS__STARTTIME, help = CliStrings.EXPORT_LOGS__STARTTIME__HELP) String start,
        @CliOption(key = CliStrings.EXPORT_LOGS__ENDTIME, help = CliStrings.EXPORT_LOGS__ENDTIME__HELP) String end,
        @CliOption(key = CliStrings.EXPORT_LOGS__LOGSONLY, unspecifiedDefaultValue = "false", specifiedDefaultValue = "true", help = CliStrings.EXPORT_LOGS__LOGSONLY__HELP) boolean logsOnly,
        @CliOption(key = CliStrings.EXPORT_LOGS__STATSONLY, unspecifiedDefaultValue = "false", specifiedDefaultValue = "true", help = CliStrings.EXPORT_LOGS__STATSONLY__HELP) boolean statsOnly,
        @CliOption(key = CliStrings.EXPORT_LOGS__FILESIZELIMIT, unspecifiedDefaultValue = CliStrings.EXPORT_LOGS__FILESIZELIMIT__UNSPECIFIED_DEFAULT, specifiedDefaultValue = CliStrings.EXPORT_LOGS__FILESIZELIMIT__SPECIFIED_DEFAULT, help = CliStrings.EXPORT_LOGS__FILESIZELIMIT__HELP) String fileSizeLimit) {

    long totalEstimatedExportSize = 0;
    Result result;
    InternalCache cache = getCache();
    try {
        Set<DistributedMember> targetMembers = getMembers(groups, memberIds);

        if (targetMembers.isEmpty()) {
            return ResultBuilder.createUserErrorResult(CliStrings.NO_MEMBERS_FOUND_MESSAGE);
        }

        long userSpecifiedLimit = parseFileSizeLimit(fileSizeLimit);
        if (userSpecifiedLimit > 0) {
            // Get estimated size of exported logs from all servers before exporting anything
            for (DistributedMember server : targetMembers) {
                SizeExportLogsFunction.Args args = new SizeExportLogsFunction.Args(start, end, logLevel,
                        onlyLogLevel, logsOnly, statsOnly);

                List<Object> results = (List<Object>) estimateLogSize(args, server).getResult();
                if (!results.isEmpty()) {
                    if (results.get(0) instanceof Long) {
                        long estimatedSize = (Long) results.get(0);
                        logger.info("Received estimated export size from member {}: {}", server.getId(),
                                estimatedSize);
                        totalEstimatedExportSize += estimatedSize;
                    } else if (results.get(0) instanceof ManagementException) {
                        ManagementException exception = (ManagementException) results.get(0);
                        return ResultBuilder.createUserErrorResult(exception.getMessage());
                    }
                }
            }

            // first check if totalEstimate file size exceeds available disk space on locator
            if (totalEstimatedExportSize > getLocalDiskAvailable()) {
                return ResultBuilder.createUserErrorResult(
                        "Estimated logs size will exceed the available disk space on the locator.");
            }
            // then check if total estimated file size exceeds user specified value
            if (totalEstimatedExportSize > userSpecifiedLimit) {
                StringBuilder sb = new StringBuilder();
                sb.append("Estimated exported logs expanded file size = ").append(totalEstimatedExportSize)
                        .append(", ").append(CliStrings.EXPORT_LOGS__FILESIZELIMIT).append(" = ")
                        .append(userSpecifiedLimit)
                        .append(". To disable exported logs file size check use option \"--file-size-limit=0\".");
                return ResultBuilder.createUserErrorResult(sb.toString());
            }
        }

        // get zipped files from all servers next
        Map<String, Path> zipFilesFromMembers = new HashMap<>();
        for (DistributedMember server : targetMembers) {
            Region region = ExportLogsFunction.createOrGetExistingExportLogsRegion(true, cache);

            ExportLogsCacheWriter cacheWriter = (ExportLogsCacheWriter) region.getAttributes().getCacheWriter();

            cacheWriter.startFile(server.getName());

            CliUtil.executeFunction(new ExportLogsFunction(),
                    new ExportLogsFunction.Args(start, end, logLevel, onlyLogLevel, logsOnly, statsOnly),
                    server).getResult();
            Path zipFile = cacheWriter.endFile();
            ExportLogsFunction.destroyExportLogsRegion(cache);

            // only put the zipfile in the map if it is not null
            if (zipFile != null) {
                logger.info("Received zip file from member {}: {}", server.getId(), zipFile);
                zipFilesFromMembers.put(server.getId(), zipFile);
            }
        }

        if (zipFilesFromMembers.isEmpty()) {
            return ResultBuilder.createUserErrorResult("No files to be exported.");
        }

        Path tempDir = Files.createTempDirectory("exportedLogs");
        // make sure the directory is created, so that even if there is no files unzipped to this
        // dir, we can still zip it and send an empty zip file back to the client
        Path exportedLogsDir = tempDir.resolve("exportedLogs");
        FileUtils.forceMkdir(exportedLogsDir.toFile());

        for (Path zipFile : zipFilesFromMembers.values()) {
            Path unzippedMemberDir = exportedLogsDir
                    .resolve(zipFile.getFileName().toString().replace(".zip", ""));
            ZipUtils.unzip(zipFile.toAbsolutePath().toString(), unzippedMemberDir.toString());
            FileUtils.deleteQuietly(zipFile.toFile());
        }

        Path dirPath;
        if (StringUtils.isBlank(dirName)) {
            dirPath = Paths.get(System.getProperty("user.dir"));
        } else {
            dirPath = Paths.get(dirName);
        }
        Path exportedLogsZipFile = dirPath.resolve("exportedLogs_" + System.currentTimeMillis() + ".zip")
                .toAbsolutePath();

        logger.info("Zipping into: " + exportedLogsZipFile.toString());
        ZipUtils.zipDirectory(exportedLogsDir, exportedLogsZipFile);
        FileUtils.deleteDirectory(tempDir.toFile());

        result = new CommandResult(exportedLogsZipFile);
    } catch (Exception ex) {
        logger.error(ex.getMessage(), ex);
        result = ResultBuilder.createGemFireErrorResult(ex.getMessage());
    } finally {
        ExportLogsFunction.destroyExportLogsRegion(cache);
    }
    if (logger.isDebugEnabled()) {
        logger.debug("Exporting logs returning = {}", result);
    }
    return result;
}

From source file:fll.web.FullTournamentTest.java
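A Selenium test that uploads a tournament schedule; toAbsolutePath() supplies the absolute CSV path sent to the file-upload input.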

/**
 * @param testDataConn
 * @param sourceTournament
 * @throws SQLException
 * @throws IOException
 * @throws InterruptedException
 */
private void uploadSchedule(final Connection testDataConn, final Tournament sourceTournament,
        final Path outputDirectory) throws SQLException, IOException, InterruptedException {
    if (TournamentSchedule.scheduleExistsInDatabase(testDataConn, sourceTournament.getTournamentID())) {

        final TournamentSchedule schedule = new TournamentSchedule(testDataConn,
                sourceTournament.getTournamentID());

        final Path outputFile = outputDirectory
                .resolve(sanitizeFilename(sourceTournament.getName()) + "_schedule.csv");
        schedule.writeToCSV(outputFile.toFile());

        // upload the saved file
        IntegrationTestUtils.loadPage(selenium, TestUtils.URL_ROOT + "admin/index.jsp");
        final WebElement fileInput = selenium.findElement(By.name("scheduleFile"));
        fileInput.sendKeys(outputFile.toAbsolutePath().toString());
        selenium.findElement(By.id("upload-schedule")).click();
        Assert.assertFalse(IntegrationTestUtils.isElementPresent(selenium, By.id("error")));

        // check that we're on the choose headers page and set the header
        // mappings
        Assert.assertTrue(selenium.getCurrentUrl().contains("chooseSubjectiveHeaders"));
        final Collection<CategoryColumnMapping> mappings = CategoryColumnMapping.load(testDataConn,
                sourceTournament.getTournamentID());
        for (final CategoryColumnMapping map : mappings) {
            final Select select = new Select(selenium.findElement(By.name(map.getCategoryName() + ":header")));
            select.selectByVisibleText(map.getScheduleColumn());
        }
        selenium.findElement(By.id("submit")).click();

        Thread.sleep(IntegrationTestUtils.WAIT_FOR_PAGE_LOAD_MS);

        // check that we don't have hard violations and skip past soft
        // violations
        assertThat(selenium.getCurrentUrl(), not(containsString("displayHardViolations")));
        if (selenium.getCurrentUrl().contains("displaySoftViolations")) {
            selenium.findElement(By.id("yes")).click();

            Thread.sleep(IntegrationTestUtils.WAIT_FOR_PAGE_LOAD_MS);
        }

        // set event divisions
        if (selenium.getCurrentUrl().contains("promptForEventDivision")) {
            selenium.findElement(By.id("yes")).click();

            Thread.sleep(IntegrationTestUtils.WAIT_FOR_PAGE_LOAD_MS);

            // assume the values are fine
            assertThat(selenium.getCurrentUrl(), containsString("displayEventDivisionConfirmation"));
            selenium.findElement(By.id("yes")).click();

            Thread.sleep(IntegrationTestUtils.WAIT_FOR_PAGE_LOAD_MS);
        }

        // check that it all worked
        Assert.assertFalse(IntegrationTestUtils.isElementPresent(selenium, By.id("error")));
        Assert.assertTrue(IntegrationTestUtils.isElementPresent(selenium, By.id("success")));
    }
}

From source file:org.jboss.as.test.integration.logging.handlers.SocketHandlerTestCase.java
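When a key store is supplied, toAbsolutePath() provides its absolute path for the Elytron key-store resource used to build the client SSL context.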

private ModelNode addSocketHandler(final String name, final String level, final String protocol,
        final Path keyStore) throws IOException {
    final CompositeOperationBuilder builder = CompositeOperationBuilder.create();
    // Add a socket handler
    final ModelNode address = SUBSYSTEM_ADDRESS.append("socket-handler", name).toModelNode();
    ModelNode op = Operations.createAddOperation(address);
    op.get("named-formatter").set(FORMATTER_NAME);
    op.get("outbound-socket-binding-ref").set(SOCKET_BINDING_NAME);
    if (level != null) {
        op.get("level").set(level);
    }
    if (protocol != null) {
        op.get("protocol").set(protocol);
    }
    if (keyStore != null) {
        // We need to add the SSL context to Elytron
        final ModelNode keyStoreAddress = Operations.createAddress("subsystem", "elytron", "key-store",
                "log-test-ks");
        resourcesToRemove.addFirst(keyStoreAddress);
        final ModelNode keyStoreAddOp = Operations.createAddOperation(keyStoreAddress);
        keyStoreAddOp.get("path").set(keyStore.toAbsolutePath().toString());
        keyStoreAddOp.get("type").set("JKS");
        final ModelNode creds = keyStoreAddOp.get("credential-reference").setEmptyObject();
        creds.get("clear-text").set(TEST_PASSWORD);
        builder.addStep(keyStoreAddOp);

        final ModelNode keyManagerAddress = Operations.createAddress("subsystem", "elytron", "trust-manager",
                "log-test-tm");
        resourcesToRemove.addLast(keyManagerAddress);
        final ModelNode keyManagerAddOp = Operations.createAddOperation(keyManagerAddress);
        keyManagerAddOp.get("key-store").set("log-test-ks");
        builder.addStep(keyManagerAddOp);

        final ModelNode sslContextAddress = Operations.createAddress("subsystem", "elytron",
                "client-ssl-context", "log-test-ssl-context");
        resourcesToRemove.addLast(sslContextAddress);
        final ModelNode sslContextAddOp = Operations.createAddOperation(sslContextAddress);
        sslContextAddOp.get("trust-manager").set("log-test-tm");
        sslContextAddOp.get("protocols").setEmptyList().add("TLSv1.2");
        builder.addStep(sslContextAddOp);

        op.get("ssl-context").set("log-test-ssl-context");
    }
    builder.addStep(op);
    resourcesToRemove.addFirst(address);

    // Add the handler to the logger
    op = Operations.createOperation("add-handler", LOGGER_ADDRESS);
    op.get("name").set(name);
    builder.addStep(op);
    executeOperation(builder.build());
    return address;
}

From source file:org.opencb.opencga.analysis.execution.plugins.ibs.IbsAnalysis.java
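This analysis writes identity-by-state results; outdir.toAbsolutePath() backs the check for whether the target is a directory before resolving the final output file name.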

@Override
public int run(Map<String, Path> input, Path outdir, ObjectMap params) throws Exception {
    CatalogManager catalogManager = getCatalogManager();
    String sessionId = getSessionId();
    long studyId = getStudyId();

    IdentityByStateClustering ibsc = new IdentityByStateClustering();
    List<String> samples;
    Query query = new Query(VariantDBAdaptor.VariantQueryParams.STUDIES.key(), studyId);
    QueryOptions options = new QueryOptions(QueryOptions.EXCLUDE, VariantField.ANNOTATION);

    Query samplesQuery = new Query();
    if (StringUtils.isNotEmpty(params.getString(SAMPLES))) {
        List<Long> sampleIds = catalogManager.getSampleIds(params.getString(SAMPLES), sessionId);
        samplesQuery.append(SampleDBAdaptor.QueryParams.ID.key(), sampleIds);
        query.append(VariantDBAdaptor.VariantQueryParams.RETURNED_SAMPLES.key(), sampleIds);
    }
    samples = catalogManager.getAllSamples(studyId, samplesQuery, new QueryOptions(), sessionId).getResult()
            .stream().map(Sample::getName).collect(Collectors.toList());

    List<IdentityByState> identityByStateList;
    try (VariantDBIterator iterator = getVariantStorageManager().iterable(sessionId).iterator(query, options)) {
        identityByStateList = ibsc.countIBS(iterator, samples);
    }
    if ("-".equals(outdir.getFileName().toString())) {
        ibsc.write(System.out, identityByStateList, samples);
    } else {
        Path outfile;
        if (outdir.toAbsolutePath().toFile().isDirectory()) {
            String alias = catalogManager.getStudy(studyId, sessionId).first().getAlias();
            outfile = outdir.resolve(alias + ".genome.gz");
        } else {
            outfile = outdir;
        }

        try (OutputStream outputStream = new GZIPOutputStream(new FileOutputStream(outfile.toFile()))) {
            ibsc.write(outputStream, identityByStateList, samples);
        }
    }

    return 0;
}