Example usage for org.apache.commons.lang StringUtils removeEnd

Introduction

On this page you can find example usages of org.apache.commons.lang.StringUtils#removeEnd, collected from open-source projects.

Prototype

public static String removeEnd(String str, String remove) 

Document

Removes a substring only if it is at the end of a source string, otherwise returns the source string.
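For reference, a minimal standalone sketch of the behaviour described above (the outputs follow the method's Javadoc contract; the class name is only for illustration):

import org.apache.commons.lang.StringUtils;

public class RemoveEndDemo {
    public static void main(String[] args) {
        // Suffix present at the end: it is removed.
        System.out.println(StringUtils.removeEnd("www.domain.com", ".com")); // www.domain
        // Substring present, but not at the end: the source string is returned unchanged.
        System.out.println(StringUtils.removeEnd("www.domain.com", "domain")); // www.domain.com
        // A null source string or an empty suffix is handled gracefully.
        System.out.println(StringUtils.removeEnd(null, ".com")); // null
        System.out.println(StringUtils.removeEnd("abc", ""));    // abc
    }
}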

Usage

From source file:org.apache.hadoop.fs.azure.AzureNativeFileSystemStore.java

/**
 * Trims a suffix/prefix from the given string. For example if
 * s is given as "/xy" and toTrim is "/", this method returns "xy"
 */
private static String trim(String s, String toTrim) {
    return StringUtils.removeEnd(StringUtils.removeStart(s, toTrim), toTrim);
}
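The removeStart/removeEnd combination above can be exercised directly; a minimal sketch of the case mentioned in the Javadoc comment (the class name is only for illustration):

import org.apache.commons.lang.StringUtils;

public class TrimDemo {
    public static void main(String[] args) {
        // Strip the prefix first, then the suffix, exactly as trim(s, toTrim) does.
        System.out.println(StringUtils.removeEnd(StringUtils.removeStart("/xy", "/"), "/"));   // xy
        // A string bounded on both sides loses both occurrences.
        System.out.println(StringUtils.removeEnd(StringUtils.removeStart("/a/b/", "/"), "/")); // a/b
    }
}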

From source file:org.apache.hadoop.mapreduce.lib.db.TestDBOutputFormat.java

@Test
public void testDB2ConstructQuery() {
    String db2expected = StringUtils.removeEnd(expected, ";");
    String db2nullExpected = StringUtils.removeEnd(nullExpected, ";");

    try {
        Class<?> clazz = this.format.getClass();
        Field field = clazz.getDeclaredField("dbProductName");
        field.setAccessible(true);
        field.set(format, "DB2");
    } catch (IllegalAccessException | NoSuchFieldException e) {
        fail(e.getMessage());
    }

    String actual = format.constructQuery("hadoop_output", fieldNames);
    assertEquals(db2expected, actual);

    actual = format.constructQuery("hadoop_output", nullFieldNames);
    assertEquals(db2nullExpected, actual);
}

From source file:org.apache.hadoop.mapreduce.lib.db.TestDBOutputFormat.java

@Test
public void testORACLEConstructQuery() {
    String oracleExpected = StringUtils.removeEnd(expected, ";");
    String oracleNullExpected = StringUtils.removeEnd(nullExpected, ";");

    try {
        Class<?> clazz = this.format.getClass();
        Field field = clazz.getDeclaredField("dbProductName");
        field.setAccessible(true);
        field.set(format, "ORACLE");
    } catch (IllegalAccessException | NoSuchFieldException e) {
        fail(e.getMessage());
    }

    String actual = format.constructQuery("hadoop_output", fieldNames);
    assertEquals(oracleExpected, actual);

    actual = format.constructQuery("hadoop_output", nullFieldNames);
    assertEquals(oracleNullExpected, actual);
}

From source file:org.apache.jackrabbit.core.query.lucene.FacetHandler.java

private String getNodeTypeFromSelector(String selectorName, String propertyName) throws RepositoryException {
    selectorName = StringUtils.removeEnd(selectorName, "translationAdded");
    Selector foundSelector = selector;
    //        for (SelectorImpl selector : ((SourceImpl) qomTree.getSource()).getSelectors()) {
    //            if (StringUtils.isEmpty(selectorName) || selectorName.equals(selector.getSelectorName())) {
    //                foundSelector = selector;
    //                break;
    //            }
    //        }
    return foundSelector.getNodeTypeName();
}

From source file:org.apache.jetspeed.portlets.spaces.PageNavigator.java

@Override
public void processAction(ActionRequest actionRequest, ActionResponse actionResponse)
        throws PortletException, IOException {
    String name = actionRequest.getParameter("name");
    String type = actionRequest.getParameter("type");
    String templatePage = StringUtils.defaultString(actionRequest.getParameter("templatePage"), null);

    SpaceBean space = (SpaceBean) actionRequest.getPortletSession().getAttribute(SpaceNavigator.ATTRIBUTE_SPACE,
            PortletSession.APPLICATION_SCOPE);

    if (space == null) {
        log.warn("Space not found in session.");
        return;
    }

    if (StringUtils.isBlank(name)) {
        log.warn("Blank name to create a node of type " + type);
        return;
    }

    if (StringUtils.isBlank(type)) {
        throw new PortletException("Blank node type: " + type);
    }

    if ((Page.DOCUMENT_TYPE.equals(type) || (Folder.FOLDER_TYPE.equals(type)))
            && StringUtils.isBlank(templatePage)) {
        templatePage = actionRequest.getPreferences().getValue("defaultTemplatePage", null);

        if (StringUtils.isBlank(templatePage)) {
            throw new PortletException("Invalid template page: " + templatePage);
        }
    }

    try {
        RequestContext requestContext = (RequestContext) actionRequest
                .getAttribute(RequestContext.REQUEST_PORTALENV);
        ContentPage contentPage = requestContext.getPage();

        String spacePath = space.getPath();
        String contentPagePath = contentPage.getPath();
        String contentFolderPath = StringUtils
                .defaultIfEmpty(StringUtils.substringBeforeLast(contentPagePath, "/"), "/");
        String nodeName = name.replace(' ', '_');
        String nodePath = null;

        if (contentFolderPath.startsWith(spacePath)) {
            nodePath = StringUtils.removeEnd(contentFolderPath, "/") + "/"
                    + StringUtils.removeStart(nodeName, "/");
        } else {
            nodePath = StringUtils.removeEnd(spacePath, "/") + "/" + StringUtils.removeStart(nodeName, "/");
        }

        if (Page.DOCUMENT_TYPE.equals(type)) {
            String path = nodePath + Page.DOCUMENT_TYPE;
            Page source = pageManager.getPage(templatePage);
            Page newPage = pageManager.copyPage(source, path, false);
            newPage.setTitle(name);
            pageManager.updatePage(newPage);

            requestContext.setSessionAttribute(PORTAL_SITE_SESSION_CONTEXT_ATTR_KEY, null);

            String redirect = admin.getPortalURL(actionRequest, actionResponse, path);
            actionResponse.sendRedirect(redirect);
        } else if (Folder.FOLDER_TYPE.equals(type)) {
            String path = nodePath;
            Folder folder = pageManager.newFolder(path);
            folder.setTitle(name);
            pageManager.updateFolder(folder);

            String defaultPagePath = folder.getPath() + "/" + Folder.FALLBACK_DEFAULT_PAGE;
            Page source = pageManager.getPage(templatePage);
            Page newPage = pageManager.copyPage(source, defaultPagePath, false);
            pageManager.updatePage(newPage);

            requestContext.setSessionAttribute(PORTAL_SITE_SESSION_CONTEXT_ATTR_KEY, null);
        } else if (Link.DOCUMENT_TYPE.equals(type)) {
            String path = nodePath + Link.DOCUMENT_TYPE;
            Link link = pageManager.newLink(path);
            link.setTitle(name);
            pageManager.updateLink(link);

            requestContext.setSessionAttribute(PORTAL_SITE_SESSION_CONTEXT_ATTR_KEY, null);
        }
    } catch (Exception e) {
        log.error("Failed to update page.", e);
    }
}

From source file:org.apache.maven.scm.provider.svn.svnexe.command.remoteinfo.SvnRemoteInfoCommand.java

@Override
public RemoteInfoScmResult executeRemoteInfoCommand(ScmProviderRepository repository, ScmFileSet fileSet,
        CommandParameters parameters) throws ScmException {

    String url = ((SvnScmProviderRepository) repository).getUrl();
    // Assume a standard SVN layout (trunk/tags/branches); the url here is
    // http://svn.apache.org/repos/asf/maven/maven-3/trunk, so the tags and
    // branches URLs can be derived from its parent.
    String baseUrl = StringUtils.endsWith(url, "/")
            ? StringUtils.substringAfter(StringUtils.removeEnd(url, "/"), "/")
            : StringUtils.substringBeforeLast(url, "/");

    Commandline cl = SvnCommandLineUtils.getBaseSvnCommandLine(fileSet == null ? null : fileSet.getBasedir(),
            (SvnScmProviderRepository) repository);

    cl.createArg().setValue("ls");

    cl.createArg().setValue(baseUrl + "/tags");

    CommandLineUtils.StringStreamConsumer stderr = new CommandLineUtils.StringStreamConsumer();

    LsConsumer consumer = new LsConsumer(getLogger(), baseUrl);

    int exitCode = 0;

    Map<String, String> tagsInfos = null;

    try {
        exitCode = SvnCommandLineUtils.execute(cl, consumer, stderr, getLogger());
        tagsInfos = consumer.infos;

    } catch (CommandLineException ex) {
        throw new ScmException("Error while executing svn command.", ex);
    }

    if (exitCode != 0) {
        return new RemoteInfoScmResult(cl.toString(), "The svn command failed.", stderr.getOutput(), false);
    }

    cl = SvnCommandLineUtils.getBaseSvnCommandLine(fileSet == null ? null : fileSet.getBasedir(),
            (SvnScmProviderRepository) repository);

    cl.createArg().setValue("ls");

    cl.createArg().setValue(baseUrl + "/branches");

    stderr = new CommandLineUtils.StringStreamConsumer();

    consumer = new LsConsumer(getLogger(), baseUrl);

    Map<String, String> branchesInfos = null;

    try {
        exitCode = SvnCommandLineUtils.execute(cl, consumer, stderr, getLogger());
        branchesInfos = consumer.infos;

    } catch (CommandLineException ex) {
        throw new ScmException("Error while executing svn command.", ex);
    }

    if (exitCode != 0) {
        return new RemoteInfoScmResult(cl.toString(), "The svn command failed.", stderr.getOutput(), false);
    }

    return new RemoteInfoScmResult(cl.toString(), branchesInfos, tagsInfos);
}
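As a quick illustration of the base-URL derivation used above, here is a standalone sketch using the URL from the source comment (only the no-trailing-slash branch is shown; the class name is only for illustration):

import org.apache.commons.lang.StringUtils;

public class SvnBaseUrlDemo {
    public static void main(String[] args) {
        // Standard SVN layout: .../trunk sits next to .../tags and .../branches.
        String url = "http://svn.apache.org/repos/asf/maven/maven-3/trunk";

        // Without a trailing slash the command takes the substringBeforeLast branch.
        String baseUrl = StringUtils.substringBeforeLast(url, "/");
        System.out.println(baseUrl + "/tags");     // .../maven/maven-3/tags
        System.out.println(baseUrl + "/branches"); // .../maven/maven-3/branches

        // removeEnd is what normalises a trailing slash before the split in the other branch.
        System.out.println(StringUtils.removeEnd(url + "/", "/")); // back to the original url
    }
}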

From source file:org.apache.maven.scm.provider.svn.svnjava.command.remoteinfo.SvnJavaRemoteInfoCommand.java

@Override
public RemoteInfoScmResult executeRemoteInfoCommand(ScmProviderRepository repository, ScmFileSet fileSet,
        CommandParameters parameters) throws ScmException {
    SvnJavaScmProviderRepository javaRepo = (SvnJavaScmProviderRepository) repository;

    String url = ((SvnScmProviderRepository) repository).getUrl();
    // Assume a standard SVN layout (trunk/tags/branches); the url here is
    // http://svn.apache.org/repos/asf/maven/maven-3/trunk, so the tags and
    // branches URLs can be derived from its parent.
    String baseUrl = StringUtils.endsWith(url, "/")
            ? StringUtils.substringAfter(StringUtils.removeEnd(url, "/"), "/")
            : StringUtils.substringBeforeLast(url, "/");

    RemoteInfoScmResult remoteInfoScmResult = new RemoteInfoScmResult(null, null, null, true);

    try {

        DirEntryHandler dirEntryHandler = new DirEntryHandler(baseUrl);
        javaRepo.getClientManager().getLogClient().doList(SVNURL.parseURIEncoded(baseUrl + "/tags"),
                SVNRevision.HEAD, SVNRevision.HEAD, false, false, dirEntryHandler);
        remoteInfoScmResult.setTags(dirEntryHandler.infos);
    } catch (SVNException e) {
        return new RemoteInfoScmResult(null, e.getMessage(), null, false);
    }

    try {

        DirEntryHandler dirEntryHandler = new DirEntryHandler(baseUrl);
        javaRepo.getClientManager().getLogClient().doList(SVNURL.parseURIEncoded(baseUrl + "/branches"),
                SVNRevision.HEAD, SVNRevision.HEAD, false, false, dirEntryHandler);
        remoteInfoScmResult.setBranches(dirEntryHandler.infos);
    } catch (SVNException e) {
        return new RemoteInfoScmResult(null, e.getMessage(), null, false);
    }

    return remoteInfoScmResult;

}

From source file:org.apache.nifi.processors.hadoop.FetchHDFS.java

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final FileSystem hdfs = getFileSystem();
    final String filenameValue = context.getProperty(FILENAME).evaluateAttributeExpressions(flowFile)
            .getValue();

    Path path = null;
    try {
        path = new Path(filenameValue);
    } catch (IllegalArgumentException e) {
        getLogger().error("Failed to retrieve content from {} for {} due to {}; routing to failure",
                new Object[] { filenameValue, flowFile, e });
        flowFile = session.putAttribute(flowFile, "hdfs.failure.reason", e.getMessage());
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
        return;
    }

    InputStream stream = null;
    CompressionCodec codec = null;
    Configuration conf = getConfiguration();
    final CompressionCodecFactory compressionCodecFactory = new CompressionCodecFactory(conf);
    final CompressionType compressionType = CompressionType
            .valueOf(context.getProperty(COMPRESSION_CODEC).toString());
    final boolean inferCompressionCodec = compressionType == CompressionType.AUTOMATIC;

    if (inferCompressionCodec) {
        codec = compressionCodecFactory.getCodec(path);
    } else if (compressionType != CompressionType.NONE) {
        codec = getCompressionCodec(context, getConfiguration());
    }

    final URI uri = path.toUri();
    final StopWatch stopWatch = new StopWatch(true);
    try {

        final String outputFilename;
        final String originalFilename = path.getName();
        stream = hdfs.open(path, 16384);

        // Check if compression codec is defined (inferred or otherwise)
        if (codec != null) {
            stream = codec.createInputStream(stream);
            outputFilename = StringUtils.removeEnd(originalFilename, codec.getDefaultExtension());
        } else {
            outputFilename = originalFilename;
        }

        flowFile = session.importFrom(stream, flowFile);
        flowFile = session.putAttribute(flowFile, CoreAttributes.FILENAME.key(), outputFilename);

        stopWatch.stop();
        getLogger().info("Successfully received content from {} for {} in {}",
                new Object[] { uri, flowFile, stopWatch.getDuration() });
        session.getProvenanceReporter().fetch(flowFile, uri.toString(),
                stopWatch.getDuration(TimeUnit.MILLISECONDS));
        session.transfer(flowFile, REL_SUCCESS);
    } catch (final FileNotFoundException | AccessControlException e) {
        getLogger().error("Failed to retrieve content from {} for {} due to {}; routing to failure",
                new Object[] { uri, flowFile, e });
        flowFile = session.putAttribute(flowFile, "hdfs.failure.reason", e.getMessage());
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
    } catch (final IOException e) {
        getLogger().error("Failed to retrieve content from {} for {} due to {}; routing to comms.failure",
                new Object[] { uri, flowFile, e });
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_COMMS_FAILURE);
    } finally {
        IOUtils.closeQuietly(stream);
    }
}
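The filename handling above reduces to stripping the codec's default extension when a codec is in play; a minimal sketch with a hard-coded ".gz" standing in for codec.getDefaultExtension() (the class name is only for illustration):

import org.apache.commons.lang.StringUtils;

public class CodecExtensionDemo {
    public static void main(String[] args) {
        // Stand-in for codec.getDefaultExtension(); Hadoop's GzipCodec returns ".gz".
        String defaultExtension = ".gz";

        // A compressed file loses its extension once the stream is decompressed.
        System.out.println(StringUtils.removeEnd("events.log.gz", defaultExtension)); // events.log

        // A file that never had the extension keeps its name unchanged.
        System.out.println(StringUtils.removeEnd("events.log", defaultExtension));    // events.log
    }
}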

From source file:org.apache.nifi.processors.hadoop.GetHDFS.java

protected void processBatchOfFiles(final List<Path> files, final ProcessContext context,
        final ProcessSession session) {
    // process the batch of files
    InputStream stream = null;
    CompressionCodec codec = null;
    Configuration conf = getConfiguration();
    FileSystem hdfs = getFileSystem();
    final boolean keepSourceFiles = context.getProperty(KEEP_SOURCE_FILE).asBoolean();
    final Double bufferSizeProp = context.getProperty(BUFFER_SIZE).asDataSize(DataUnit.B);
    int bufferSize = bufferSizeProp != null ? bufferSizeProp.intValue()
            : conf.getInt(BUFFER_SIZE_KEY, BUFFER_SIZE_DEFAULT);
    final Path rootDir = new Path(context.getProperty(DIRECTORY).evaluateAttributeExpressions().getValue());

    final CompressionType compressionType = CompressionType
            .valueOf(context.getProperty(COMPRESSION_CODEC).toString());
    final boolean inferCompressionCodec = compressionType == CompressionType.AUTOMATIC;
    if (inferCompressionCodec || compressionType != CompressionType.NONE) {
        codec = getCompressionCodec(context, getConfiguration());
    }
    final CompressionCodecFactory compressionCodecFactory = new CompressionCodecFactory(conf);
    for (final Path file : files) {
        try {
            if (!hdfs.exists(file)) {
                continue; // if file is no longer there then move on
            }
            final String originalFilename = file.getName();
            final String relativePath = getPathDifference(rootDir, file);

            stream = hdfs.open(file, bufferSize);

            final String outputFilename;
            // Check if we should infer compression codec
            if (inferCompressionCodec) {
                codec = compressionCodecFactory.getCodec(file);
            }
            // Check if compression codec is defined (inferred or otherwise)
            if (codec != null) {
                stream = codec.createInputStream(stream);
                outputFilename = StringUtils.removeEnd(originalFilename, codec.getDefaultExtension());
            } else {
                outputFilename = originalFilename;
            }

            FlowFile flowFile = session.create();

            final StopWatch stopWatch = new StopWatch(true);
            flowFile = session.importFrom(stream, flowFile);
            stopWatch.stop();
            final String dataRate = stopWatch.calculateDataRate(flowFile.getSize());
            final long millis = stopWatch.getDuration(TimeUnit.MILLISECONDS);

            flowFile = session.putAttribute(flowFile, CoreAttributes.PATH.key(), relativePath);
            flowFile = session.putAttribute(flowFile, CoreAttributes.FILENAME.key(), outputFilename);

            if (!keepSourceFiles && !hdfs.delete(file, false)) {
                getLogger().warn("Could not remove {} from HDFS. Not ingesting this file ...",
                        new Object[] { file });
                session.remove(flowFile);
                continue;
            }

            final String transitUri = (originalFilename.startsWith("/")) ? "hdfs:/" + originalFilename
                    : "hdfs://" + originalFilename;
            session.getProvenanceReporter().receive(flowFile, transitUri);
            session.transfer(flowFile, REL_SUCCESS);
            getLogger().info("retrieved {} from HDFS {} in {} milliseconds at a rate of {}",
                    new Object[] { flowFile, file, millis, dataRate });
            session.commit();
        } catch (final Throwable t) {
            getLogger().error("Error retrieving file {} from HDFS due to {}", new Object[] { file, t });
            session.rollback();
            context.yield();
        } finally {
            IOUtils.closeQuietly(stream);
            stream = null;
        }
    }
}

From source file:org.apache.rya.indexing.accumulo.freetext.AccumuloFreeTextIndexer.java

private Set<String> unrollWildcard(final String string, final boolean reverse) throws IOException {
    final Scanner termTableScan = getScanner(getFreeTextTermTablename(conf));

    final Set<String> unrolledTerms = new HashSet<String>();

    Text queryTerm;
    if (reverse) {
        final String t = StringUtils.removeStart(string, "*").toLowerCase();
        queryTerm = ColumnPrefixes.getRevTermListColFam(t);
    } else {
        final String t = StringUtils.removeEnd(string, "*").toLowerCase();
        queryTerm = ColumnPrefixes.getTermListColFam(t);
    }

    // perform query and read results
    termTableScan.setRange(Range.prefix(queryTerm));

    for (final Entry<Key, Value> e : termTableScan) {
        final String term = ColumnPrefixes.removePrefix(e.getKey().getRow()).toString();
        if (reverse) {
            unrolledTerms.add(StringUtils.reverse(term));
        } else {
            unrolledTerms.add(term);
        }
    }

    if (unrolledTerms.isEmpty()) {
        // put in a placeholder term that will never be in the index.
        unrolledTerms.add("\1\1\1");
    }

    return unrolledTerms;
}
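The wildcard handling above only needs the leading or trailing "*" stripped before the prefix scan; a minimal sketch of that normalisation (the reversed hit is a made-up value standing in for a term read back from the reverse term list; the class name is only for illustration):

import org.apache.commons.lang.StringUtils;

public class WildcardDemo {
    public static void main(String[] args) {
        // Trailing wildcard: strip the '*' and use the remainder as the scan prefix.
        System.out.println(StringUtils.removeEnd("Apache*", "*").toLowerCase()); // apache

        // Leading wildcard: strip the '*'; hits come back from the reversed term
        // list and are flipped back with StringUtils.reverse before use.
        System.out.println(StringUtils.removeStart("*pache", "*").toLowerCase()); // pache
        System.out.println(StringUtils.reverse("ehcapa"));                        // apache
    }
}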