Example usage for java.net URI resolve

List of usage examples for java.net URI resolve

Introduction

On this page you can find usage examples for java.net.URI.resolve.

Prototype

public URI resolve(String str) 

Document

Constructs a new URI by parsing the given string and then resolving it against this URI.
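
The examples below rely on two basic rules of this resolution: a relative reference is appended to the base path, while a reference that starts with "/" replaces the base path but keeps the scheme, host and port. A minimal, self-contained sketch (the URIs are made up for illustration):

import java.net.URI;

public class UriResolveDemo {
    public static void main(String[] args) {
        URI base = URI.create("https://example.com/docs/manual/");

        // A relative reference is appended to the base path.
        System.out.println(base.resolve("images/page1.png"));
        // -> https://example.com/docs/manual/images/page1.png

        // A reference starting with "/" replaces the base path, keeping scheme and host.
        System.out.println(base.resolve("/service/upload?fmt=raw"));
        // -> https://example.com/service/upload?fmt=raw

        // Without a trailing slash, the last segment of the base is dropped.
        System.out.println(URI.create("https://example.com/docs/manual").resolve("page1.html"));
        // -> https://example.com/docs/page1.html
    }
}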

Usage

From source file:de.sub.goobi.metadaten.Metadaten.java

/**
 * identifyImage.
 *
 * @param welches
 *            offset relative to the current image
 */
public void identifyImage(int welches) {
    /*
     * if the images are not being displayed, there is no need to
     * rescale the image
     */
    logger.trace("start identifyImage 1");
    if (!this.displayImage) {
        logger.trace("end identifyImage 1");
        return;
    }
    logger.trace("ocr identifyImage");
    this.ocrResult = "";

    logger.trace("dataList");
    List<URI> dataList = this.imageHelper.getImageFiles(digitalDocument.getPhysicalDocStruct());
    logger.trace("dataList 2");
    if (ConfigCore.getBooleanParameter(Parameters.WITH_AUTOMATIC_PAGINATION, true)
            && (dataList == null || dataList.isEmpty())) {
        try {
            createPagination();
            dataList = this.imageHelper.getImageFiles(digitalDocument.getPhysicalDocStruct());
        } catch (IOException | TypeNotAllowedForParentException e) {
            logger.error(e);
        }
    }
    if (dataList != null && dataList.size() > 0) {
        logger.trace("dataList not null");
        this.lastImage = dataList.size();
        logger.trace("myBildLetztes");
        for (int i = 0; i < dataList.size(); i++) {
            if (logger.isTraceEnabled()) {
                logger.trace("file: " + i);
            }
            if (this.image == null) {
                this.image = dataList.get(0);
            }
            /* once the current image has been found, determine the new one */
            if (isCurrentImageCorrectImage(dataList, i)) {
                logger.trace("index == picture");
                int pos = i + welches;
                if (logger.isTraceEnabled()) {
                    logger.trace("pos: " + pos);
                }
                /* but do not allow indexes outside the array bounds */
                if (pos < 0) {
                    pos = 0;
                }
                if (pos > dataList.size() - 1) {
                    pos = dataList.size() - 1;
                }
                if (this.currentTifFolder != null) {
                    if (logger.isTraceEnabled()) {
                        logger.trace("currentTifFolder: " + this.currentTifFolder);
                    }
                    dataList = this.imageHelper.getImageFiles(this.process, this.currentTifFolder);
                    if (dataList == null) {
                        return;
                    }
                }
                /* pick up the current tif */
                if (dataList.size() > pos) {
                    this.image = dataList.get(pos);
                } else {
                    this.image = dataList.get(dataList.size() - 1);
                }
                logger.trace("found myBild");
                /* display the correct page number */
                this.imageNumber = pos + 1;
                if (logger.isTraceEnabled()) {
                    logger.trace("myBildNummer: " + this.imageNumber);
                }
                /* determine the pages directory */
                URI myPfad = ConfigCore.getTempImagesPathAsCompleteDirectory();
                if (logger.isTraceEnabled()) {
                    logger.trace("myPfad: " + myPfad);
                }
                /*
                 * set the counter for the image ID to a new value so that
                 * nothing gets cached
                 */
                this.imageCounter++;
                if (logger.isTraceEnabled()) {
                    logger.trace("myBildCounter: " + this.imageCounter);
                }

                /* determine the session */
                FacesContext context = FacesContext.getCurrentInstance();
                HttpSession session = (HttpSession) context.getExternalContext().getSession(false);
                String mySession = session.getId() + "_" + this.imageCounter + ".png";
                logger.trace("facescontext");

                /* assign the new image */
                try {
                    URI tiffconverterpfad = fileService.getImagesDirectory(this.process)
                            .resolve(this.currentTifFolder + "/" + this.image);
                    if (logger.isTraceEnabled()) {
                        logger.trace("tiffconverterpfad: " + tiffconverterpfad);
                    }
                    if (!fileService.fileExist(tiffconverterpfad)) {
                        tiffconverterpfad = serviceManager.getProcessService()
                                .getImagesTifDirectory(true, this.process).resolve(this.image);
                        Helper.setFehlerMeldung("formularOrdner:TifFolders", "",
                                "image " + this.image + " does not exist in folder " + this.currentTifFolder
                                        + ", using image from " + new File(serviceManager.getProcessService()
                                                .getImagesTifDirectory(true, this.process)).getName());
                    }
                    this.imageHelper.scaleFile(tiffconverterpfad, myPfad.resolve(mySession), this.imageSize,
                            this.imageRotation);
                    logger.trace("scaleFile");
                } catch (Exception e) {
                    Helper.setFehlerMeldung("could not getById image folder", e);
                    logger.error(e);
                }
                break;
            }
        }
    }
    checkImage();
}
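
In the example above, resolve builds both the path to the current tif (getImagesDirectory(...).resolve(currentTifFolder + "/" + image)) and the path of the scaled PNG (myPfad.resolve(mySession)). A rough sketch of what those calls produce, with made-up directory URIs standing in for the values returned by fileService and ConfigCore; note that resolve only appends to a base whose path ends with "/", otherwise the last segment is replaced:

import java.net.URI;

public class KitodoResolveSketch {
    public static void main(String[] args) {
        // Hypothetical stand-ins for fileService.getImagesDirectory(process)
        // and ConfigCore.getTempImagesPathAsCompleteDirectory().
        URI imagesDirectory = URI.create("file:/data/kitodo/processes/42/images/");
        URI tempImagesPath = URI.create("file:/opt/kitodo/temp/pages/");

        String currentTifFolder = "42_tif";
        String image = "00000001.tif";
        String sessionFile = "A1B2C3_7.png";

        // Mirrors imagesDirectory.resolve(currentTifFolder + "/" + image)
        System.out.println(imagesDirectory.resolve(currentTifFolder + "/" + image));
        // -> file:/data/kitodo/processes/42/images/42_tif/00000001.tif

        // Mirrors myPfad.resolve(mySession) for the scaled PNG
        System.out.println(tempImagesPath.resolve(sessionFile));
        // -> file:/opt/kitodo/temp/pages/A1B2C3_7.png
    }
}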

From source file:com.zimbra.client.ZMailbox.java

private URI getUploadURI(boolean limitByFileUploadMaxSize) throws ServiceException {
    try {
        URI uri = new URI(mTransport.getURI());
        return uri.resolve("/service/upload?fmt=raw" + (limitByFileUploadMaxSize ? "&lbfums" : ""));
    } catch (URISyntaxException e) {
        throw ZClientException.CLIENT_ERROR("unable to parse URI: " + mTransport.getURI(), e);
    }
}
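
Because the string passed to resolve starts with "/", only the scheme, host and port of the transport URI are kept and its path is replaced. A small sketch with a hypothetical transport URI (the real value comes from mTransport.getURI()):

import java.net.URI;
import java.net.URISyntaxException;

public class UploadUriSketch {
    public static void main(String[] args) throws URISyntaxException {
        // Hypothetical transport URI standing in for mTransport.getURI().
        URI uri = new URI("https://mail.example.com/service/soap/");

        boolean limitByFileUploadMaxSize = true;
        URI upload = uri.resolve("/service/upload?fmt=raw"
                + (limitByFileUploadMaxSize ? "&lbfums" : ""));

        System.out.println(upload);
        // -> https://mail.example.com/service/upload?fmt=raw&lbfums
    }
}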

From source file:com.zimbra.client.ZMailbox.java

/**
 * Returns a REST URL relative to this mailbox.
 * @param relativePath a relative path (e.g., "/Calendar", "Inbox?fmt=rss", etc.)
 * @param alternateUrl alternate URL to connect to
 * @return URI of the path
 * @throws ServiceException on error
 */
private URI getRestURI(String relativePath, String alternateUrl) throws ServiceException {
    String pathPrefix = "/";
    if (relativePath.startsWith("/")) {
        pathPrefix = "";
    }

    try {
        String restURI = getAccountInfo(false).getRestURLBase();
        if (alternateUrl != null) {
            // parse the URI and extract path
            URI uri = new URI(restURI);
            restURI = alternateUrl + uri.getPath();
        }

        if (restURI == null) {
            URI uri = new URI(mTransport.getURI());
            return uri.resolve("/home/" + getName() + pathPrefix + relativePath);
        } else {
            return new URI(restURI + pathPrefix + relativePath);
        }
    } catch (URISyntaxException e) {
        throw ZClientException.CLIENT_ERROR("unable to parse URI: " + mTransport.getURI(), e);
    }
}

From source file:de.sub.goobi.export.download.ExportPdf.java

@Override
public boolean startExport(Process myProcess, URI inZielVerzeichnis) throws ReadException, IOException,
        PreferencesException, TypeNotAllowedForParentException, WriteException {

    /*
     * Read Document
     */
    Fileformat gdzfile = serviceManager.getProcessService().readMetadataFile(myProcess);
    URI zielVerzeichnis = prepareUserDirectory(inZielVerzeichnis);
    this.myPrefs = serviceManager.getRulesetService().getPreferences(myProcess.getRuleset());

    /*
     * first of all write mets-file in images-Folder of process
     */
    URI metsTempFile = fileService.createResource(myProcess.getTitle() + ".xml");
    writeMetsFile(myProcess, metsTempFile, gdzfile, true);
    Helper.setMeldung(null, myProcess.getTitle() + ": ", "mets file created");
    Helper.setMeldung(null, myProcess.getTitle() + ": ", "start pdf generation now");

    if (logger.isDebugEnabled()) {
        logger.debug("METS file created: " + metsTempFile);
    }

    FacesContext context = FacesContext.getCurrentInstance();
    HttpServletRequest req = (HttpServletRequest) context.getExternalContext().getRequest();
    String fullpath = req.getRequestURL().toString();
    String servletpath = context.getExternalContext().getRequestServletPath();
    String myBasisUrl = fullpath.substring(0, fullpath.indexOf(servletpath));

    if (!ConfigCore.getBooleanParameter("pdfAsDownload")) {
        /*
         * use contentserver api for creation of pdf-file
         */
        CreatePdfFromServletThread pdf = new CreatePdfFromServletThread();
        pdf.setMetsURL(metsTempFile.toURL());
        pdf.setTargetFolder(zielVerzeichnis);
        pdf.setInternalServletPath(myBasisUrl);
        if (logger.isDebugEnabled()) {
            logger.debug("Taget directory: " + zielVerzeichnis);
            logger.debug("Using ContentServer2 base URL: " + myBasisUrl);
        }
        pdf.initialize(myProcess);
        pdf.start();
    } else {

        GetMethod method = null;
        try {
            /*
             * define path for mets and pdfs
             */
            URL kitodoContentServerUrl = null;
            String contentServerUrl = ConfigCore.getParameter("kitodoContentServerUrl");
            Integer contentServerTimeOut = ConfigCore.getIntParameter("kitodoContentServerTimeOut", 60000);

            /*
             * using mets file
             */

            if (new MetadatenVerifizierung().validate(myProcess) && metsTempFile.toURL() != null) {
                /*
                 * if no contentserverurl defined use internal
                 * goobiContentServerServlet
                 */
                if (contentServerUrl == null || contentServerUrl.length() == 0) {
                    contentServerUrl = myBasisUrl + "/gcs/gcs?action=pdf&metsFile=";
                }
                kitodoContentServerUrl = new URL(contentServerUrl + metsTempFile.toURL()
                        + AND_TARGET_FILE_NAME_IS + myProcess.getTitle() + PDF_EXTENSION);
                /*
                 * mets data does not exist or is invalid
                 */

            } else {
                if (contentServerUrl == null || contentServerUrl.length() == 0) {
                    contentServerUrl = myBasisUrl + "/cs/cs?action=pdf&images=";
                }
                FilenameFilter filter = new FileNameMatchesFilter("\\d*\\.tif");
                URI imagesDir = serviceManager.getProcessService().getImagesTifDirectory(true, myProcess);
                ArrayList<URI> meta = fileService.getSubUris(filter, imagesDir);
                int capacity = contentServerUrl.length() + (meta.size() - 1) + AND_TARGET_FILE_NAME_IS.length()
                        + myProcess.getTitle().length() + PDF_EXTENSION.length();
                TreeSet<String> filenames = new TreeSet<>(new MetadatenHelper(null, null));
                for (URI data : meta) {
                    String file = data.toURL().toString();
                    filenames.add(file);
                    capacity += file.length();
                }
                StringBuilder url = new StringBuilder(capacity);
                url.append(contentServerUrl);
                boolean subsequent = false;
                for (String f : filenames) {
                    if (subsequent) {
                        url.append('$');
                    } else {
                        subsequent = true;
                    }
                    url.append(f);
                }
                url.append(AND_TARGET_FILE_NAME_IS);
                url.append(myProcess.getTitle());
                url.append(PDF_EXTENSION);
                kitodoContentServerUrl = new URL(url.toString());
            }

            /*
             * get pdf from servlet and forward response to file
             */
            method = new GetMethod(kitodoContentServerUrl.toString());
            method.getParams().setParameter("http.socket.timeout", contentServerTimeOut);

            if (!context.getResponseComplete()) {
                HttpServletResponse response = (HttpServletResponse) context.getExternalContext().getResponse();
                String fileName = myProcess.getTitle() + PDF_EXTENSION;
                ServletContext servletContext = (ServletContext) context.getExternalContext().getContext();
                String contentType = servletContext.getMimeType(fileName);
                response.setContentType(contentType);
                response.setHeader("Content-Disposition", "attachment;filename=\"" + fileName + "\"");
                response.sendRedirect(kitodoContentServerUrl.toString());
                context.responseComplete();
            }
            if (metsTempFile.toURL() != null) {
                File tempMets = new File(metsTempFile.toURL().toString());
                tempMets.delete();
            }
        } catch (Exception e) {

            /*
             * report Error to User as Error-Log
             */
            String text = "error while pdf creation: " + e.getMessage();
            URI uri = zielVerzeichnis.resolve(myProcess.getTitle() + ".PDF-ERROR.log");
            try (BufferedWriter output = new BufferedWriter(new OutputStreamWriter(fileService.write(uri)))) {
                output.write(text);
            } catch (IOException e1) {
                logger.error(e1);
            }
            return false;
        } finally {
            if (method != null) {
                method.releaseConnection();
            }
        }
    }
    return true;
}

From source file:org.opencb.opencga.storage.core.variant.VariantStoragePipeline.java

/**
 * Transforms raw variant files into the biodata model.
 *
 * @param inputUri Input file. Accepted formats: *.vcf, *.vcf.gz
 * @param pedigreeUri Pedigree input file. Accepted formats: *.ped
 * @param outputUri The destination folder
 * @throws StorageEngineException if any IO problem occurs
 */
@Override
public URI transform(URI inputUri, URI pedigreeUri, URI outputUri) throws StorageEngineException {
    // input: VcfReader
    // output: JsonWriter

    Path input = Paths.get(inputUri.getPath());
    Path pedigree = pedigreeUri == null ? null : Paths.get(pedigreeUri.getPath());
    Path output = Paths.get(outputUri.getPath());

    //        boolean includeSamples = options.getBoolean(Options.INCLUDE_GENOTYPES.key(), false);
    boolean includeStats = options.getBoolean(Options.INCLUDE_STATS.key(), false);
    //        boolean includeSrc = options.getBoolean(Options.INCLUDE_SRC.key(), Options.INCLUDE_SRC.defaultValue());
    boolean includeSrc = false;
    boolean failOnError = options.getBoolean(Options.TRANSFORM_FAIL_ON_MALFORMED_VARIANT.key(),
            Options.TRANSFORM_FAIL_ON_MALFORMED_VARIANT.defaultValue());
    String format = options.getString(Options.TRANSFORM_FORMAT.key(), Options.TRANSFORM_FORMAT.defaultValue());
    String parser = options.getString("transform.parser", HTSJDK_PARSER);

    VariantSource source = buildVariantSource(input);
    String fileName = source.getFileName();
    boolean generateReferenceBlocks = options.getBoolean(Options.GVCF.key(), false);

    int batchSize = options.getInt(Options.TRANSFORM_BATCH_SIZE.key(),
            Options.TRANSFORM_BATCH_SIZE.defaultValue());

    String compression = options.getString(Options.COMPRESS_METHOD.key(),
            Options.COMPRESS_METHOD.defaultValue());
    String extension = "";
    int numTasks = options.getInt(Options.TRANSFORM_THREADS.key(), Options.TRANSFORM_THREADS.defaultValue());
    int capacity = options.getInt("blockingQueueCapacity", numTasks * 2);

    if ("gzip".equalsIgnoreCase(compression) || "gz".equalsIgnoreCase(compression)) {
        extension = ".gz";
    } else if ("snappy".equalsIgnoreCase(compression) || "snz".equalsIgnoreCase(compression)) {
        extension = ".snappy";
    } else if (!compression.isEmpty()) {
        throw new IllegalArgumentException("Unknown compression method " + compression);
    }

    Path outputMalformedVariants = output.resolve(fileName + "." + VariantReaderUtils.MALFORMED_FILE + ".txt");
    Path outputVariantsFile = output
            .resolve(fileName + "." + VariantReaderUtils.VARIANTS_FILE + "." + format + extension);
    Path outputMetaFile = VariantReaderUtils.getMetaFromTransformedFile(outputVariantsFile);

    // Close at the end!
    final MalformedVariantHandler malformedHandler;
    try {
        malformedHandler = new MalformedVariantHandler(outputMalformedVariants);
    } catch (IOException e) {
        throw new StorageEngineException(e.getMessage(), e);
    }

    ParallelTaskRunner.Config config = ParallelTaskRunner.Config.builder().setNumTasks(numTasks)
            .setBatchSize(batchSize).setCapacity(capacity).setSorted(true).build();

    logger.info("Transforming variants using {} into {} ...", parser, format);
    long start, end;
    if (numTasks == 1 && "json".equals(format)) { //Run transformation with a SingleThread runner. The legacy way
        if (!".gz".equals(extension)) { //FIXME: Add compatibility with snappy compression
            logger.warn("Force using gzip compression");
            extension = ".gz";
            outputVariantsFile = output.resolve(fileName + ".variants.json" + extension);
        }

        //Ped Reader
        PedigreeReader pedReader = null;
        if (pedigree != null && pedigree.toFile().exists()) { //FIXME Add "endsWith(".ped") ??
            pedReader = new PedigreePedReader(pedigree.toString());
        }

        //Reader
        VariantReader reader = new VariantVcfReader(source, input.toAbsolutePath().toString());

        //Writers
        VariantJsonWriter jsonWriter = new VariantJsonWriter(source, output);
        jsonWriter.includeStats(includeStats);

        List<VariantWriter> writers = Collections.<VariantWriter>singletonList(jsonWriter);

        //Runner
        VariantRunner vr = new VariantRunner(source, reader, pedReader, writers,
                Collections.<Task<Variant>>singletonList(new VariantGlobalStatsCalculator(source)), batchSize);

        logger.info("Single thread transform...");
        start = System.currentTimeMillis();
        try {
            vr.run();
        } catch (IOException e) {
            throw new StorageEngineException("Fail runner execution", e);
        }
        end = System.currentTimeMillis();

    } else if ("avro".equals(format)) {
        //Read VariantSource
        source = VariantReaderUtils.readVariantSource(input, source);

        //Reader
        StringDataReader dataReader = new StringDataReader(input);
        long fileSize = 0;
        try {
            fileSize = dataReader.getFileSize();
        } catch (IOException e) {
            throw new StorageEngineException("Error reading file " + input, e);
        }
        ProgressLogger progressLogger = new ProgressLogger("Transforming file:", fileSize, 200);
        dataReader.setReadBytesListener((totalRead, delta) -> progressLogger.increment(delta, "Bytes"));

        //Writer
        DataWriter<ByteBuffer> dataWriter;
        try {
            dataWriter = new AvroFileWriter<>(VariantAvro.getClassSchema(), compression,
                    new FileOutputStream(outputVariantsFile.toFile()));
        } catch (FileNotFoundException e) {
            throw new StorageEngineException("Fail init writer", e);
        }
        Supplier<VariantTransformTask<ByteBuffer>> taskSupplier;

        if (parser.equalsIgnoreCase(HTSJDK_PARSER)) {
            logger.info("Using HTSJDK to read variants.");
            FullVcfCodec codec = new FullVcfCodec();
            final VariantSource finalSource = source;
            Pair<VCFHeader, VCFHeaderVersion> header = readHtsHeader(input);
            VariantGlobalStatsCalculator statsCalculator = new VariantGlobalStatsCalculator(source);
            taskSupplier = () -> new VariantAvroTransformTask(header.getKey(), header.getValue(), finalSource,
                    outputMetaFile, statsCalculator, includeSrc, generateReferenceBlocks)
                            .setFailOnError(failOnError).addMalformedErrorHandler(malformedHandler);
        } else {
            // TODO Create a utility to determine which extensions are variants files
            final VariantVcfFactory factory = createVariantVcfFactory(source, fileName);
            logger.info("Using Biodata to read variants.");
            final VariantSource finalSource = source;
            VariantGlobalStatsCalculator statsCalculator = new VariantGlobalStatsCalculator(source);
            taskSupplier = () -> new VariantAvroTransformTask(factory, finalSource, outputMetaFile,
                    statsCalculator, includeSrc).setFailOnError(failOnError)
                            .addMalformedErrorHandler(malformedHandler);
        }

        logger.info("Generating output file {}", outputVariantsFile);

        ParallelTaskRunner<String, ByteBuffer> ptr;
        try {
            ptr = new ParallelTaskRunner<>(dataReader, taskSupplier, dataWriter, config);
        } catch (Exception e) {
            throw new StorageEngineException("Error while creating ParallelTaskRunner", e);
        }
        logger.info("Multi thread transform... [1 reading, {} transforming, 1 writing]", numTasks);
        start = System.currentTimeMillis();
        try {
            ptr.run();
        } catch (ExecutionException e) {
            throw new StorageEngineException("Error while executing TransformVariants in ParallelTaskRunner",
                    e);
        }
        end = System.currentTimeMillis();
    } else if ("json".equals(format)) {
        //Read VariantSource
        source = VariantReaderUtils.readVariantSource(input, source);

        //Reader
        StringDataReader dataReader = new StringDataReader(input);
        long fileSize = 0;
        try {
            fileSize = dataReader.getFileSize();
        } catch (IOException e) {
            throw new StorageEngineException("Error reading file " + input, e);
        }
        ProgressLogger progressLogger = new ProgressLogger("Transforming file:", fileSize, 200);
        dataReader.setReadBytesListener((totalRead, delta) -> progressLogger.increment(delta, "Bytes"));

        //Writers
        StringDataWriter dataWriter = new StringDataWriter(outputVariantsFile, true);

        final VariantSource finalSource = source;
        ParallelTaskRunner<String, String> ptr;

        Supplier<VariantTransformTask<String>> taskSupplier;
        if (parser.equalsIgnoreCase(HTSJDK_PARSER)) {
            logger.info("Using HTSJDK to read variants.");
            Pair<VCFHeader, VCFHeaderVersion> header = readHtsHeader(input);
            VariantGlobalStatsCalculator statsCalculator = new VariantGlobalStatsCalculator(finalSource);
            taskSupplier = () -> new VariantJsonTransformTask(header.getKey(), header.getValue(), finalSource,
                    outputMetaFile, statsCalculator, includeSrc, generateReferenceBlocks)
                            .setFailOnError(failOnError).addMalformedErrorHandler(malformedHandler);
        } else {
            // TODO Create a utility to determine which extensions are variants files
            final VariantVcfFactory factory = createVariantVcfFactory(source, fileName);
            logger.info("Using Biodata to read variants.");
            VariantGlobalStatsCalculator statsCalculator = new VariantGlobalStatsCalculator(source);
            taskSupplier = () -> new VariantJsonTransformTask(factory, finalSource, outputMetaFile,
                    statsCalculator, includeSrc).setFailOnError(failOnError)
                            .addMalformedErrorHandler(malformedHandler);
        }

        logger.info("Generating output file {}", outputVariantsFile);

        try {
            ptr = new ParallelTaskRunner<>(dataReader, taskSupplier, dataWriter, config);
        } catch (Exception e) {
            throw new StorageEngineException("Error while creating ParallelTaskRunner", e);
        }

        logger.info("Multi thread transform... [1 reading, {} transforming, 1 writing]", numTasks);
        start = System.currentTimeMillis();
        try {
            ptr.run();
        } catch (ExecutionException e) {
            throw new StorageEngineException("Error while executing TransformVariants in ParallelTaskRunner",
                    e);
        }
        end = System.currentTimeMillis();
    } else if ("proto".equals(format)) {
        //Read VariantSource
        source = VariantReaderUtils.readVariantSource(input, source);
        Pair<Long, Long> times = processProto(input, fileName, output, source, outputVariantsFile,
                outputMetaFile, includeSrc, parser, generateReferenceBlocks, batchSize, extension, compression,
                malformedHandler, failOnError);
        start = times.getKey();
        end = times.getValue();
    } else {
        throw new IllegalArgumentException("Unknown format " + format);
    }
    logger.info("end - start = " + (end - start) / 1000.0 + "s");
    logger.info("Variants transformed!");

    // Close the malformed variant handler
    malformedHandler.close();
    if (malformedHandler.getMalformedLines() > 0) {
        getTransformStats().put("malformed lines", malformedHandler.getMalformedLines());
    }

    return outputUri.resolve(outputVariantsFile.getFileName().toString());
}
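
Note that this method mixes two different resolve calls: java.nio.file.Path.resolve, which simply joins path elements when building outputVariantsFile, and java.net.URI.resolve in the final return statement, which applies RFC 3986 resolution and drops the last segment of outputUri unless its path ends with "/". A small sketch with hypothetical output locations (Unix-style paths):

import java.net.URI;
import java.nio.file.Path;
import java.nio.file.Paths;

public class PathVersusUriResolve {
    public static void main(String[] args) {
        // Hypothetical output folder standing in for outputUri / output above.
        URI outputUri = URI.create("file:/tmp/transform/");
        Path output = Paths.get(outputUri.getPath());

        // Path.resolve joins path elements on the local file system.
        Path outputVariantsFile = output.resolve("study.vcf.variants.avro.gz");
        System.out.println(outputVariantsFile);
        // -> /tmp/transform/study.vcf.variants.avro.gz

        // URI.resolve applies RFC 3986 resolution against the directory URI.
        System.out.println(outputUri.resolve(outputVariantsFile.getFileName().toString()));
        // -> file:/tmp/transform/study.vcf.variants.avro.gz
    }
}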