Example usage for java.net.URI.getAuthority()

List of usage examples for java.net.URI.getAuthority()

Introduction

On this page you can find usage examples for the java.net.URI.getAuthority() method.

Prototype

public String getAuthority() 

Document

Returns the decoded authority component of this URI.
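
A quick, self-contained sketch (using a made-up URL) of what the decoded authority component looks like; for a typical http(s) URI it is the host plus the optional port:

import java.net.URI;

public class GetAuthorityDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical URI; the authority is everything between "//" and the path.
        URI uri = new URI("https://example.com:8443/docs/index.html?lang=en#intro");
        System.out.println(uri.getAuthority());   // prints: example.com:8443
    }
}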

Usage

From source file: org.dataconservancy.packaging.impl.UriUtility.java

/**
 * Resolves the supplied {@code bag://} URI against a platform-specific base directory. This method is used to
 * resolve resources in a bag to a platform-specific {@code Path} used by the caller to access the content of the
 * resource.
 * <p>
 * Example usage: Given a bag that contains a resource identified by the URI {@code bag://my-bag/data/bar}, and
 * the bag has been exploded into the directory {@code /tmp/foo/my-bag} (where the bag payload directory is
 * located at {@code /tmp/foo/my-bag/data}) then the base directory of the bag is {@code /tmp/foo}. If the caller
 * wishes to resolve the URI {@code bag://my-bag/data/bar}, they would invoke this method:
 * </p>
 * <pre>
 *     Path result = UriUtility.resolveBagUri(Paths.get("/tmp/foo"), new URI("bag://my-bag/data/bar"));
 *     assert Paths.get("/tmp/foo/my-bag/data/bar").equals(result);
 * </pre>
 * <p>
 * The base directory does not need to exist. This implementation will {@link Path#normalize() normalize} the
 * supplied directory.
 * </p>
 * <p>
 * The {@code bag://} URI is converted to a path by concatenating the authority portion of the URI with the path
 * portion.
 * </p>
 * <p>
 * If the supplied {@code bagUri} is <em>not</em> a URI with the {@code bag} scheme, an
 * {@code IllegalArgumentException} is thrown.
 * </p>
 *
 * @param baseDir the base directory that contains the bag
 * @param bagUri a URI identifying a resource in a bag
 * @return a platform-specific {@code Path}, used to access the contents of the resource identified by
 *         {@code bagUri}
 * @throws IllegalArgumentException if the supplied bagUri is null or empty, if {@code baseDir} is null, if
 *         {@code bagUri} does not have scheme {@code bag}
 * @throws RuntimeException if the supplied base directory cannot be normalized
 */
public static Path resolveBagUri(final Path baseDir, final URI bagUri) {
    if (bagUri == null) {
        throw new IllegalArgumentException(
                String.format(ERR_RESOLVE_BAGURI + "bag uri was null.", "null", baseDir));
    }

    if (!bagUri.getScheme().equals(BAG_URI_SCHEME)) {
        throw new IllegalArgumentException(
                String.format(ERR_RESOLVE_BAGURI + "bag uri had incorrect scheme.", bagUri, baseDir));
    }

    if (baseDir == null) {
        throw new IllegalArgumentException(
                String.format(ERR_RESOLVE_BAGURI + "base directory was null", bagUri, "null"));
    }

    // normalize the base directory path
    final Path originalDir = baseDir;
    final Path normalizedDir = baseDir.normalize();

    if (normalizedDir == null) {
        throw new RuntimeException(String.format(ERR_RESOLVE_BAGURI + "failed to normalize the base directory.",
                bagUri, originalDir));
    }

    final Path bagPath = Paths.get(bagUri.getAuthority(), bagUri.getPath());

    return normalizedDir.resolve(bagPath);
}

From source file: com.marvelution.gadgets.sonar.servlet.SonarMakeRequestServlet.java

/**
 * Get the {@link Host} from a given {@link URI}
 *
 * @param uri the {@link URI}
 * @return the {@link Host}
 */
private Host getHost(URI uri) {
    StringBuilder hostUri = new StringBuilder();
    hostUri.append(uri.getScheme()).append("://");
    if (uri.getAuthority().indexOf("@") > -1) {
        hostUri.append(uri.getAuthority().substring(uri.getAuthority().lastIndexOf("@") + 1));
    } else {
        hostUri.append(uri.getAuthority());
    }
    Host host = new Host(hostUri.toString());
    if (uri.getAuthority().indexOf("@") > -1) {
        String userInfo = uri.getAuthority().substring(0, uri.getAuthority().lastIndexOf("@"));
        host.setUsername(userInfo.substring(0, userInfo.indexOf(":")));
        host.setPassword(userInfo.substring(userInfo.indexOf(":") + 1));
    }
    return host;
}
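
The example above pulls the user name and password out of the raw authority string by hand. For comparison, here is a small sketch (with a made-up URI, not taken from the project above) showing that getAuthority() includes the user-info part, and that URI also exposes the individual pieces via getUserInfo(), getHost() and getPort():

import java.net.URI;

public class AuthorityUserInfoDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical URI carrying credentials in the authority component.
        URI uri = new URI("http://admin:secret@sonar.example.com:9000/dashboard");
        System.out.println(uri.getAuthority());  // admin:secret@sonar.example.com:9000
        System.out.println(uri.getUserInfo());   // admin:secret
        System.out.println(uri.getHost());       // sonar.example.com
        System.out.println(uri.getPort());       // 9000
    }
}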

From source file: io.druid.firehose.oss.StaticOSSFirehoseFactory.java

@Override
public Firehose connect(StringInputRowParser firehoseParser) throws IOException {

    Preconditions.checkNotNull(ossClient, "null ossClient");

    final LinkedList<URI> objectQueue = Lists.newLinkedList(uris);

    return new FileIteratingFirehose(new Iterator<LineIterator>() {
        @Override
        public boolean hasNext() {
            return !objectQueue.isEmpty();
        }

        @Override
        public LineIterator next() {
            final URI nextURI = objectQueue.poll();

            final String bucket = nextURI.getAuthority();
            final String key = nextURI.getPath().startsWith("/") ? nextURI.getPath().substring(1)
                    : nextURI.getPath();

            log.info("reading from bucket[%s] object[%s] (%s)", bucket, key, nextURI);

            try {
                final InputStream innerInputStream = ossClient.getObject(bucket, key).getObjectContent();

                final InputStream outerInputStream = key.endsWith(".gz")
                        ? CompressionUtils.gzipInputStream(innerInputStream)
                        : innerInputStream;

                return IOUtils.lineIterator(
                        new BufferedReader(new InputStreamReader(outerInputStream, Charsets.UTF_8)));
            } catch (Exception e) {
                log.error(e, "exception reading from bucket[%s] object[%s]", bucket, key);

                throw Throwables.propagate(e);
            }
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    }, firehoseParser);
}

From source file: io.druid.firehose.s3.StaticS3FirehoseFactory.java

@Override
public Firehose connect(StringInputRowParser firehoseParser) throws IOException {
    Preconditions.checkNotNull(s3Client, "null s3Client");

    final LinkedList<URI> objectQueue = Lists.newLinkedList(uris);

    return new FileIteratingFirehose(new Iterator<LineIterator>() {
        @Override
        public boolean hasNext() {
            return !objectQueue.isEmpty();
        }

        @Override
        public LineIterator next() {
            final URI nextURI = objectQueue.poll();

            final String s3Bucket = nextURI.getAuthority();
            final S3Object s3Object = new S3Object(
                    nextURI.getPath().startsWith("/") ? nextURI.getPath().substring(1) : nextURI.getPath());

            log.info("Reading from bucket[%s] object[%s] (%s)", s3Bucket, s3Object.getKey(), nextURI);

            try {
                final InputStream innerInputStream = s3Client
                        .getObject(new S3Bucket(s3Bucket), s3Object.getKey()).getDataInputStream();

                final InputStream outerInputStream = s3Object.getKey().endsWith(".gz")
                        ? CompressionUtils.gzipInputStream(innerInputStream)
                        : innerInputStream;

                return IOUtils.lineIterator(
                        new BufferedReader(new InputStreamReader(outerInputStream, Charsets.UTF_8)));
            } catch (Exception e) {
                log.error(e, "Exception reading from bucket[%s] object[%s]", s3Bucket, s3Object.getKey());

                throw Throwables.propagate(e);
            }
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    }, firehoseParser);
}
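
In both firehose examples above, the authority component of the oss:// or s3:// URI serves as the bucket name, and the path (minus its leading slash) as the object key. A minimal sketch of that split, using a made-up URI:

import java.net.URI;

public class BucketKeyDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical object-store URI: authority = bucket, path without leading "/" = key.
        URI uri = new URI("s3://my-bucket/path/to/data.gz");
        String bucket = uri.getAuthority();
        String key = uri.getPath().startsWith("/") ? uri.getPath().substring(1) : uri.getPath();
        System.out.println(bucket + " / " + key);   // my-bucket / path/to/data.gz
    }
}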

From source file: org.fishwife.jrugged.httpclient.AbstractHttpClientDecorator.java

protected HttpHost getHttpHost(HttpUriRequest req) {
    URI uri = req.getURI();
    String scheme = uri.getScheme();
    if ("HTTPS".equalsIgnoreCase(scheme)) {
        return new HttpHost(uri.getScheme() + "://" + uri.getAuthority());
    } else {
        return new HttpHost(uri.getAuthority());
    }
}

From source file: com.github.frankfarrell.snowball.Application.java

@Bean
public RedissonClient redisson() throws URISyntaxException {
    /*
    Three Possibilities here
    Redis Running on Local,etc
    Redis Running Embedded
    Redis Running Heroku
     */
    Config config = new Config();

    //Unfortunately this is the only way to get it to work
    if (redisHeroku) {

        URI redisURI = new URI(System.getenv("REDIS_URL"));
        config.useSingleServer().setAddress(redisURI.getHost() + ":" + redisURI.getPort())
                .setPassword(redisURI.getAuthority().split("[:@]")[1]); //Strip the username from password
    } else {
        config.useSingleServer().setAddress(redisAddress + ":" + redisPort);
    }

    return Redisson.create(config);
}
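
The password extraction above depends on the shape of the authority component of a Heroku-style Redis URL. Assuming a REDIS_URL of the form redis://user:password@host:port (the exact value is an assumption here, not taken from the project), the split works like this:

import java.net.URI;

public class RedisAuthorityDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical REDIS_URL; the authority is "user:password@host:port".
        URI redisURI = new URI("redis://h:s3cret@ec2-1-2-3-4.compute-1.amazonaws.com:6379");
        String authority = redisURI.getAuthority();
        // Splitting on ':' and '@' yields [user, password, host, port]; index 1 is the password.
        System.out.println(authority.split("[:@]")[1]);   // s3cret
    }
}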

From source file: com.thinkbiganalytics.nifi.security.ApplySecurityPolicy.java

protected void checkHdfsUriForTimeout(Configuration config) throws IOException {
    URI hdfsUri = FileSystem.getDefaultUri(config);
    String address = hdfsUri.getAuthority();
    int port = hdfsUri.getPort();
    if (address == null || address.isEmpty() || port < 0) {
        return;
    }
    InetSocketAddress namenode = NetUtils.createSocketAddr(address, port);
    SocketFactory socketFactory = NetUtils.getDefaultSocketFactory(config);
    Socket socket = null;
    try {
        socket = socketFactory.createSocket();
        NetUtils.connect(socket, namenode, 1000); // 1 second timeout
    } finally {
        IOUtils.closeQuietly(socket);
    }
}
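
Here the authority of the default filesystem URI is the namenode address that the timeout check connects to. A small sketch with a made-up cluster address:

import java.net.URI;

public class HdfsAuthorityDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical fs.defaultFS value; the authority is the namenode host:port.
        URI hdfsUri = new URI("hdfs://namenode.example.com:8020");
        System.out.println(hdfsUri.getAuthority());   // namenode.example.com:8020
        System.out.println(hdfsUri.getPort());        // 8020
    }
}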

From source file: org.apache.hadoop.fs.TestHadoopArchives.java

@Test
public void testCopy() throws Exception {
    String localDir = CopyFilesBase.TEST_ROOT_DIR + "/srcdat";
    String localDir2 = CopyFilesBase.TEST_ROOT_DIR + "/srcdat2";
    Configuration conf = new Configuration();
    FileSystem localfs = FileSystem.getLocal(conf);

    MyFile[] myFiles = CopyFilesBase.createFiles(localfs, localDir);

    FileSystem fs = dfscluster.getFileSystem();
    Path archivePath = new Path(fs.getHomeDirectory(), "srcdat.har");

    {
        // copy from Local to hdfs
        String[] args = { "-copyFromLocal", localDir, archivePath.toString() };
        int ret = ToolRunner.run(new HadoopArchives(conf), args);
        assertTrue("failed test", ret == 0);

        URI uri = archivePath.toUri();
        // create appropriate har path
        Path harPath = new Path("har://" + uri.getScheme() + "-" + uri.getAuthority() + uri.getPath());

        FileSystem harfs = harPath.getFileSystem(conf);
        CopyFilesBase.checkFiles(harfs, archivePath.toString(), myFiles);
    }

    {
        // copy from hdfs to local
        localfs.mkdirs(new Path(localDir2));
        String[] args = { "-copyToLocal", archivePath.toString(), localDir2 };
        int ret = ToolRunner.run(new HadoopArchives(conf), args);
        assertTrue("failed test", ret == 0);

        CopyFilesBase.checkFiles(localfs, localDir2, myFiles);
    }

    CopyFilesBase.deldir(localfs, localDir);
    CopyFilesBase.deldir(localfs, localDir2);
    fs.delete(archivePath, true);
}
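
The test above builds the har:// path by concatenating the archive's scheme, authority and path. A minimal sketch of that concatenation, with a hypothetical archive location:

import java.net.URI;

public class HarPathDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical location of a Hadoop archive on HDFS.
        URI uri = new URI("hdfs://namenode.example.com:8020/user/alice/srcdat.har");
        String harPath = "har://" + uri.getScheme() + "-" + uri.getAuthority() + uri.getPath();
        System.out.println(harPath);   // har://hdfs-namenode.example.com:8020/user/alice/srcdat.har
    }
}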

From source file: org.apache.oozie.service.TestHAShareLibService.java

protected void setUp() throws Exception {
    super.setUp();
    container = new EmbeddedServletContainer("oozie");
    container.addServletEndpoint("/v2/admin/*", V2AdminServlet.class);
    container.addServletEndpoint("/other-oozie-server/*", DummyV2AdminServlet.class);
    container.addFilter("*", HostnameFilter.class);
    container.start();
    Services.get().setService(ShareLibService.class);
    Services.get().getConf().setBoolean(AuthorizationService.CONF_SECURITY_ENABLED, false);

    Services.get().setService(ZKJobsConcurrencyService.class);

    Path launcherlibPath = Services.get().get(WorkflowAppService.class).getSystemLibPath();
    HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
    URI uri = launcherlibPath.toUri();
    fs = FileSystem.get(has.createJobConf(uri.getAuthority()));
    Date time = new Date(System.currentTimeMillis());

    Path basePath = new Path(Services.get().getConf().get(WorkflowAppService.SYSTEM_LIB_PATH));
    Path libpath = new Path(basePath,
            ShareLibService.SHARE_LIB_PREFIX + ShareLibService.dateFormat.format(time));
    fs.mkdirs(libpath);

    Path pigPath = new Path(libpath.toString() + Path.SEPARATOR + "pig");
    Path pigPath1 = new Path(libpath.toString() + Path.SEPARATOR + "pig_9");
    Path pigPath2 = new Path(libpath.toString() + Path.SEPARATOR + "pig_10");
    fs.mkdirs(pigPath);
    fs.mkdirs(pigPath1);
    fs.mkdirs(pigPath2);
    fs.create(new Path(libpath.toString() + Path.SEPARATOR + "pig_10" + Path.SEPARATOR + "pig-10.jar")).close();

}

From source file: org.eclipse.orion.internal.server.servlets.xfer.SFTPTransfer.java

private void transferWithExceptions() throws ServletException, IOException, URISyntaxException, JSONException {
    String host, remotePath, user, passphrase;
    int port;
    try {
        JSONObject requestInfo = OrionServlet.readJSONRequest(request);
        host = requestInfo.getString(ProtocolConstants.KEY_HOST);
        remotePath = requestInfo.getString(ProtocolConstants.KEY_PATH);
        port = requestInfo.optInt(ProtocolConstants.KEY_PORT, 22);
        user = requestInfo.getString(ProtocolConstants.KEY_USER_NAME);
        passphrase = requestInfo.getString(ProtocolConstants.KEY_PASSPHRASE);
    } catch (Exception e) {
        handleException("Request body is not in the expected format", e, HttpServletResponse.SC_BAD_REQUEST);
        return;
    }
    File localFile;
    try {
        localFile = localRoot.toLocalFile(EFS.NONE, null);
    } catch (CoreException e) {
        handleException(NLS.bind("Import is not supported at this location: {0}", localRoot.toString()), e,
                HttpServletResponse.SC_NOT_IMPLEMENTED);
        return;
    }
    SFTPTransferJob job;
    if (TransferServlet.PREFIX_IMPORT.equals(new Path(request.getPathInfo()).segment(0))) {
        job = new SFTPImportJob(TaskJobHandler.getUserId(request), localFile, host, port, new Path(remotePath),
                user, passphrase, options);
    } else {
        job = new SFTPExportJob(TaskJobHandler.getUserId(request), localFile, host, port, new Path(remotePath),
                user, passphrase, options);
    }
    job.schedule();
    TaskInfo task = job.getTask();
    JSONObject result = task.toJSON();
    //Not nice that the import service knows the location of the task servlet, but task service doesn't know this either
    URI requestLocation = ServletResourceHandler.getURI(request);
    URI taskLocation = new URI(requestLocation.getScheme(), requestLocation.getAuthority(),
            "/task/temp/" + task.getId(), null, null); //$NON-NLS-1$
    result.put(ProtocolConstants.KEY_LOCATION, taskLocation);
    response.setHeader(ProtocolConstants.HEADER_LOCATION,
            ServletResourceHandler.resovleOrionURI(request, taskLocation).toString());
    OrionServlet.writeJSONResponse(request, response, result);
    response.setStatus(HttpServletResponse.SC_ACCEPTED);
}
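
The servlet above rebuilds a URI that keeps the scheme and authority of the incoming request but points at the task servlet. A small sketch (with a made-up request location and task id) of the same multi-argument URI constructor:

import java.net.URI;

public class RebuildUriDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical request URI; only the path is replaced, scheme and authority are kept.
        URI requestLocation = new URI("https://orion.example.com:8443/xfer/import/myproject");
        URI taskLocation = new URI(requestLocation.getScheme(), requestLocation.getAuthority(),
                "/task/temp/1234", null, null);
        System.out.println(taskLocation);   // https://orion.example.com:8443/task/temp/1234
    }
}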