Example usage for java.lang System lineSeparator

List of usage examples for java.lang System lineSeparator

Introduction

On this page you can find example usage for java.lang System lineSeparator.

Prototype

public static String lineSeparator()

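Before the full examples, a minimal sketch (not taken from any of the source files below) shows what the method returns: the platform-dependent line separator, which is the same value as the line.separator system property ("\n" on Linux and macOS, "\r\n" on Windows).

public class LineSeparatorDemo {
    public static void main(String[] args) {
        // Platform-dependent separator: "\n" on Linux/macOS, "\r\n" on Windows.
        String newline = System.lineSeparator();

        // Same value as the line.separator system property.
        System.out.println(newline.equals(System.getProperty("line.separator"))); // true

        // Typical use: building multi-line text that matches the host platform.
        String text = "first line" + newline + "second line";
        System.out.print(text);
    }
}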

Usage

From source file:net.di2e.ecdr.source.rest.CDROpenSearchSource.java
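Executes an HTTP GET query against a remote CDR source; when an error status comes back, the response body is logged with System.lineSeparator() placed before it so the body appears on its own line.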

protected SourceResponse doQuery(Map<String, String> filterParameters, QueryRequest queryRequest)
        throws UnsupportedQueryException {
    SourceResponse sourceResponse;
    SearchResponseTransformer transformer = lookupSearchResponseTransformer();

    setSecurityCredentials(cdrRestClient, queryRequest.getProperties());
    filterParameters.putAll(getInitialFilterParameters(queryRequest));
    setURLQueryString(filterParameters);
    setHttpHeaders(filterParameters, cdrRestClient);
    LOGGER.debug("Executing http GET query to source [{}] with url [{}]", localId,
            cdrRestClient.getCurrentURI().toString());
    // TLSUtil.setTLSOptions( cdrRestClient );
    Response response = cdrRestClient.get();
    LOGGER.debug("Query to source [{}] returned http status code [{}] and media type [{}]", localId,
            response.getStatus(), response.getMediaType());

    if (response.getStatus() == Status.OK.getStatusCode()) {
        // Be sure to pass in the getId() instead of the localId so Connected sources populate the Metacard with the
        // right Id
        sourceResponse = transformer.processSearchResponse((InputStream) response.getEntity(), queryRequest,
                getId());
        if (!supportsQueryById()) {
            sourceResponse = cacheResults(sourceResponse);
        }
    } else {
        Object entity = response.getEntity();
        if (entity != null) {
            try {
                LOGGER.warn("Error status code received [{}] when querying site [{}]:{}[{}]",
                        response.getStatus(), localId, System.lineSeparator(),
                        IOUtils.toString((InputStream) entity));
            } catch (IOException e) {
                LOGGER.warn("Error status code received [{}] when querying site [{}]", response.getStatus(),
                        localId);
            }
        } else {
            LOGGER.warn("Error status code received [{}] when querying site [{}]", response.getStatus(),
                    localId);
        }
        throw new UnsupportedQueryException(
                "Query to remote source returned http status code " + response.getStatus());
    }
    return sourceResponse;
}

From source file:cc.altruix.javaprologinterop.PlUtilsLogic.java
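Concatenates several Prolog files into a single theory, wrapping each file's contents in "% path (START)" / "% path (END)" comment markers separated by System.lineSeparator().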

public void loadPrologFiles(final Prolog engine, final String[] files)
        throws IOException, InvalidTheoryException {
    final List<String> paths = Arrays.asList(files);
    final StringBuilder theoryBuilder = new StringBuilder();

    for (final String path : paths) {
        theoryBuilder.append(System.lineSeparator());
        theoryBuilder.append("% ");
        theoryBuilder.append(path);
        theoryBuilder.append(" (START)");
        theoryBuilder.append(System.lineSeparator());
        theoryBuilder.append(FileUtils.readFileToString(new File(path)));
        theoryBuilder.append(System.lineSeparator());
        theoryBuilder.append("% ");
        theoryBuilder.append(path);
        theoryBuilder.append(" (END)");
        theoryBuilder.append(System.lineSeparator());
    }

    final Theory test1 = new Theory(theoryBuilder.toString());
    engine.setTheory(test1);
}

From source file:de.static_interface.sinkscripts.SinkScripts.java
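Parses an injection script; the lines that follow the @@INJECT@@ marker are joined back into a single code block with System.lineSeparator().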

public void runInjection(File file) throws Exception {
    String[] lines = Util.readLines(file);

    String code = "";
    String targetClass = null;
    boolean constructor = false;
    InjectTarget target = InjectTarget.AFTER_METHOD;
    String method = null;
    List<Class> methodArgs = new ArrayList<>();
    boolean codeStart = false;

    for (String line : lines) {
        if (line.equals("@@INJECT@@")) {
            codeStart = true;
            continue;
        }

        if (!codeStart) {
            if (line.startsWith("[") && line.endsWith("]")) {
                line = line.replaceFirst("\\Q[\\E", "");
                line = StringUtil.replaceLast(line, "]", "");
                String[] parts = line.split(":", 2);
                parts[0] = parts[0].trim();
                if (parts[0].equalsIgnoreCase("Constructor")) {
                    constructor = true;
                }

                if (parts.length < 2)
                    continue;
                parts[1] = parts[1].trim();

                switch (parts[0].toLowerCase()) {
                case "method":
                    method = parts[1];
                    break;
                case "at":
                case "injecttarget":
                    target = InjectTarget.valueOf(parts[1].toUpperCase());
                    break;
                case "arg":
                case "methodarg":
                    methodArgs.add(Class.forName(parts[1]));
                    break;
                case "class":
                case "targetclass":
                case "target":
                    targetClass = parts[1];
                    break;
                }
                continue;
            }

            if (constructor && method != null) {
                throw new Exception("Invalid config: construct & method specified at the same time!");
            }

            continue;
        }

        if (code.equals("")) {
            code = line;
            continue;
        }
        code += System.lineSeparator() + line;
    }

    Validate.notNull(target, "injecttarget is not specified");
    Validate.notNull(targetClass, "class is not specified");
    if (!constructor) {
        Validate.notNull(method, "method or constructor is not specified");
    }
    Validate.notEmpty(code, "no code found");

    Class clazz = Class.forName(targetClass);
    if (!constructor) {
        Injector.injectCode(targetClass, clazz.getClassLoader(), method,
                methodArgs.toArray(new Class[methodArgs.size()]), code, target);
    } else {
        Injector.injectCodeConstructor(targetClass, clazz.getClassLoader(),
                methodArgs.toArray(new Class[methodArgs.size()]), code, target);
    }
}

From source file:org.apache.zeppelin.submarine.hadoop.YarnClient.java
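Reads a YARN REST response body line by line and joins the lines with Collectors.joining(System.lineSeparator()).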

public Map<String, Object> getAppServices(String appIdOrName) {
    Map<String, Object> mapStatus = new HashMap<>();
    String appUrl = this.yarnWebHttpAddr + "/app/v1/services/" + appIdOrName + "?_="
            + System.currentTimeMillis();

    InputStream inputStream = null;
    try {
        HttpResponse response = callRestUrl(appUrl, principal, HTTP.GET);
        inputStream = response.getEntity().getContent();
        String result = new BufferedReader(new InputStreamReader(inputStream)).lines()
                .collect(Collectors.joining(System.lineSeparator()));
        if (response.getStatusLine().getStatusCode() != 200 /*success*/
                && response.getStatusLine().getStatusCode() != 404 /*Not found*/) {
            LOGGER.warn("Status code " + response.getStatusLine().getStatusCode());
            LOGGER.warn("message is :" + Arrays.deepToString(response.getAllHeaders()));
            LOGGER.warn("result\n" + result);
        }

        // parse app status json
        mapStatus = parseAppServices(result);
    } catch (Exception exp) {
        exp.printStackTrace();
    } finally {
        try {
            if (null != inputStream) {
                inputStream.close();
            }
        } catch (Exception e) {
            LOGGER.error(e.getMessage(), e);
        }
    }

    return mapStatus;
}

From source file:com.stratio.explorer.interpreter.InterpreterFactory.java
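Reads a file into a string, appending System.lineSeparator() after each line.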

private String loadFromFile(String path) {

    File fileToRead = new File(path);
    if (!fileToRead.exists()) {
        // nothing to read
        return "empty";
    }
    FileInputStream fis = null;
    try {
        fis = new FileInputStream(fileToRead);

        InputStreamReader isr = new InputStreamReader(fis);
        BufferedReader bufferedReader = new BufferedReader(isr);
        StringBuilder sb = new StringBuilder();
        String line;
        while ((line = bufferedReader.readLine()) != null) {
            sb.append(line).append(System.lineSeparator());
        }
        isr.close();
        fis.close();
        return sb.toString();

    } catch (FileNotFoundException e) {
        return "input stream error " + e;
    } catch (IOException e) {
        return "io exception " + e;
    }

}

From source file:com.pearson.eidetic.driver.threads.subthreads.SnapshotVolumeNoTime.java
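Reads the Retain value from an Eidetic tag; if parsing fails, the error log separates the exception message from its stack trace with System.lineSeparator().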

public Integer getKeep(JSONObject eideticParameters, Volume vol) {
    if ((eideticParameters == null) || (vol == null)) {
        return null;
    }

    JSONObject createSnapshot = null;
    if (eideticParameters.containsKey("CreateSnapshot")) {
        createSnapshot = (JSONObject) eideticParameters.get("CreateSnapshot");
    }
    if (createSnapshot == null) {
        logger.error("awsAccountNickname=\"" + uniqueAwsAccountIdentifier_
                + "\",Event=Error, Error=\"Malformed Eidetic Tag\", Volume_id=\"" + vol.getVolumeId() + "\"");
        return null;
    }

    Integer keep = null;
    if (createSnapshot.containsKey("Retain")) {
        try {
            keep = Integer.parseInt(createSnapshot.get("Retain").toString());
        } catch (Exception e) {
            logger.error("awsAccountNickname=\"" + uniqueAwsAccountIdentifier_
                    + "\",Event=Error, Error=\"Malformed Eidetic Tag\", Volume_id=\"" + vol.getVolumeId()
                    + "\", stacktrace=\"" + e.toString() + System.lineSeparator()
                    + StackTrace.getStringFromStackTrace(e) + "\"");
        }
    }

    return keep;
}

From source file:com.pearson.eidetic.driver.threads.RefreshAwsAccountVolumes.java
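Parses the JSON value of a volume's Eidetic tag; on a parse error, System.lineSeparator() separates the exception message from the stack trace in the log entry.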

private JSONObject getEideticParameters(Volume volume, JSONParser parser) {
    JSONObject eideticParameters = null;
    for (Tag tag : volume.getTags()) {
        String tagValue = null;
        if (tag.getKey().equalsIgnoreCase("Eidetic")) {
            tagValue = tag.getValue();
        }
        if (tagValue == null) {
            continue;
        }

        try {
            Object obj = parser.parse(tagValue);
            eideticParameters = (JSONObject) obj;
        } catch (Exception e) {
            logger.error("awsAccountNickname=\"" + uniqueAwsAccountIdentifier_
                    + "\",Event=Error, Error=\"Malformed Eidetic Tag\", Volume_id=\"" + volume.getVolumeId()
                    + "\", stacktrace=\"" + e.toString() + System.lineSeparator()
                    + StackTrace.getStringFromStackTrace(e) + "\"");
            break;
        }
    }
    return eideticParameters;
}

From source file:org.apache.nifi.minifi.bootstrap.util.ConfigTransformer.java
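Writes a nifi.properties file from a MiNiFi config schema, using System.lineSeparator() inside the property comments to start each new section (state management, repositories, web, security, cluster).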

protected static void writeNiFiProperties(ConfigSchema configSchema, OutputStream outputStream)
        throws IOException, ConfigurationChangeException {
    try {
        CorePropertiesSchema coreProperties = configSchema.getCoreProperties();
        FlowFileRepositorySchema flowfileRepoSchema = configSchema.getFlowfileRepositoryProperties();
        SwapSchema swapProperties = flowfileRepoSchema.getSwapProperties();
        ContentRepositorySchema contentRepoProperties = configSchema.getContentRepositoryProperties();
        ComponentStatusRepositorySchema componentStatusRepoProperties = configSchema
                .getComponentStatusRepositoryProperties();
        SecurityPropertiesSchema securityProperties = configSchema.getSecurityProperties();
        SensitivePropsSchema sensitiveProperties = securityProperties.getSensitiveProps();
        ProvenanceRepositorySchema provenanceRepositorySchema = configSchema.getProvenanceRepositorySchema();

        OrderedProperties orderedProperties = new OrderedProperties();
        orderedProperties.setProperty(NIFI_VERSION_KEY, NIFI_VERSION,
                "# Core Properties #" + System.lineSeparator());
        orderedProperties.setProperty("nifi.flow.configuration.file", "./conf/flow.xml.gz");
        orderedProperties.setProperty("nifi.flow.configuration.archive.enabled", "false");
        orderedProperties.setProperty("nifi.flow.configuration.archive.dir", "./conf/archive/");
        orderedProperties.setProperty("nifi.flowcontroller.autoResumeState", "true");
        orderedProperties.setProperty("nifi.flowcontroller.graceful.shutdown.period",
                coreProperties.getFlowControllerGracefulShutdownPeriod());
        orderedProperties.setProperty("nifi.flowservice.writedelay.interval",
                coreProperties.getFlowServiceWriteDelayInterval());
        orderedProperties.setProperty("nifi.administrative.yield.duration",
                coreProperties.getAdministrativeYieldDuration());
        orderedProperties.setProperty("nifi.variable.registry.properties",
                coreProperties.getVariableRegistryProperties());

        orderedProperties.setProperty("nifi.bored.yield.duration", coreProperties.getBoredYieldDuration(),
                "# If a component has no work to do (is \"bored\"), how long should we wait before checking again for work?");

        orderedProperties.setProperty("nifi.authority.provider.configuration.file",
                "./conf/authority-providers.xml", "");
        orderedProperties.setProperty("nifi.login.identity.provider.configuration.file",
                "./conf/login-identity-providers.xml");
        orderedProperties.setProperty("nifi.templates.directory", "./conf/templates");
        orderedProperties.setProperty("nifi.ui.banner.text", "");
        orderedProperties.setProperty("nifi.ui.autorefresh.interval", "30 sec");
        orderedProperties.setProperty("nifi.nar.library.directory", "./lib");
        orderedProperties.setProperty("nifi.nar.working.directory", "./work/nar/");
        orderedProperties.setProperty("nifi.documentation.working.directory", "./work/docs/components");

        orderedProperties.setProperty("nifi.state.management.configuration.file", "./conf/state-management.xml",
                System.lineSeparator() + "####################" + "# State Management #"
                        + "####################");

        orderedProperties.setProperty("nifi.state.management.provider.local", "local-provider",
                "# The ID of the local state provider");

        orderedProperties.setProperty("nifi.database.directory", "./database_repository",
                System.lineSeparator() + "# H2 Settings");
        orderedProperties.setProperty("nifi.h2.url.append",
                ";LOCK_TIMEOUT=25000;WRITE_DELAY=0;AUTO_SERVER=FALSE");
        orderedProperties.setProperty("nifi.flowfile.repository.implementation",
                "org.apache.nifi.controller.repository.WriteAheadFlowFileRepository",
                System.lineSeparator() + "# FlowFile Repository");
        orderedProperties.setProperty("nifi.flowfile.repository.directory", "./flowfile_repository");
        orderedProperties.setProperty("nifi.flowfile.repository.partitions",
                String.valueOf(flowfileRepoSchema.getPartitions()));
        orderedProperties.setProperty("nifi.flowfile.repository.checkpoint.interval",
                flowfileRepoSchema.getCheckpointInterval());
        orderedProperties.setProperty("nifi.flowfile.repository.always.sync",
                Boolean.toString(flowfileRepoSchema.getAlwaysSync()));

        orderedProperties.setProperty("nifi.swap.manager.implementation",
                "org.apache.nifi.controller.FileSystemSwapManager", "");
        orderedProperties.setProperty("nifi.queue.swap.threshold",
                String.valueOf(swapProperties.getThreshold()));
        orderedProperties.setProperty("nifi.swap.in.period", swapProperties.getInPeriod());
        orderedProperties.setProperty("nifi.swap.in.threads", String.valueOf(swapProperties.getInThreads()));
        orderedProperties.setProperty("nifi.swap.out.period", swapProperties.getOutPeriod());
        orderedProperties.setProperty("nifi.swap.out.threads", String.valueOf(swapProperties.getOutThreads()));

        orderedProperties.setProperty("nifi.content.repository.implementation",
                "org.apache.nifi.controller.repository.FileSystemRepository",
                System.lineSeparator() + "# Content Repository");
        orderedProperties.setProperty("nifi.content.claim.max.appendable.size",
                contentRepoProperties.getContentClaimMaxAppendableSize());
        orderedProperties.setProperty("nifi.content.claim.max.flow.files",
                String.valueOf(contentRepoProperties.getContentClaimMaxFlowFiles()));
        orderedProperties.setProperty("nifi.content.repository.archive.max.retention.period", "");
        orderedProperties.setProperty("nifi.content.repository.archive.max.usage.percentage", "");
        orderedProperties.setProperty("nifi.content.repository.archive.enabled", "false");
        orderedProperties.setProperty("nifi.content.repository.directory.default", "./content_repository");
        orderedProperties.setProperty("nifi.content.repository.always.sync",
                Boolean.toString(contentRepoProperties.getAlwaysSync()));

        orderedProperties.setProperty("nifi.provenance.repository.implementation",
                provenanceRepositorySchema.getProvenanceRepository(),
                System.lineSeparator() + "# Provenance Repository Properties");

        orderedProperties.setProperty("nifi.provenance.repository.rollover.time",
                provenanceRepositorySchema.getProvenanceRepoRolloverTimeKey());

        orderedProperties.setProperty("nifi.provenance.repository.buffer.size", "10000",
                System.lineSeparator() + "# Volatile Provenance Respository Properties");

        orderedProperties.setProperty("nifi.components.status.repository.implementation",
                "org.apache.nifi.controller.status.history.VolatileComponentStatusRepository",
                System.lineSeparator() + "# Component Status Repository");
        orderedProperties.setProperty("nifi.components.status.repository.buffer.size",
                String.valueOf(componentStatusRepoProperties.getBufferSize()));
        orderedProperties.setProperty("nifi.components.status.snapshot.frequency",
                componentStatusRepoProperties.getSnapshotFrequency());

        orderedProperties.setProperty("nifi.web.war.directory", "./lib",
                System.lineSeparator() + "# web properties #");
        orderedProperties.setProperty("nifi.web.http.host", "");
        orderedProperties.setProperty("nifi.web.http.port", "8081");
        orderedProperties.setProperty("nifi.web.https.host", "");
        orderedProperties.setProperty("nifi.web.https.port", "");
        orderedProperties.setProperty("nifi.web.jetty.working.directory", "./work/jetty");
        orderedProperties.setProperty("nifi.web.jetty.threads", "200");

        orderedProperties.setProperty("nifi.sensitive.props.key", sensitiveProperties.getKey(),
                System.lineSeparator() + "# security properties #");
        orderedProperties.setProperty("nifi.sensitive.props.algorithm", sensitiveProperties.getAlgorithm());
        orderedProperties.setProperty("nifi.sensitive.props.provider", sensitiveProperties.getProvider());

        orderedProperties.setProperty("nifi.security.keystore", securityProperties.getKeystore(), "");
        orderedProperties.setProperty("nifi.security.keystoreType", securityProperties.getKeystoreType());
        orderedProperties.setProperty("nifi.security.keystorePasswd", securityProperties.getKeystorePassword());
        orderedProperties.setProperty("nifi.security.keyPasswd", securityProperties.getKeyPassword());
        orderedProperties.setProperty("nifi.security.truststore", securityProperties.getTruststore());
        orderedProperties.setProperty("nifi.security.truststoreType", securityProperties.getTruststoreType());
        orderedProperties.setProperty("nifi.security.truststorePasswd",
                securityProperties.getTruststorePassword());
        orderedProperties.setProperty("nifi.security.needClientAuth", "");
        orderedProperties.setProperty("nifi.security.user.credential.cache.duration", "24 hours");
        orderedProperties.setProperty("nifi.security.user.authority.provider", "file-provider");
        orderedProperties.setProperty("nifi.security.user.login.identity.provider", "");
        orderedProperties.setProperty("nifi.security.support.new.account.requests", "");

        orderedProperties.setProperty("nifi.security.anonymous.authorities", "",
                "# Valid Authorities include: ROLE_MONITOR,ROLE_DFM,ROLE_ADMIN,ROLE_PROVENANCE,ROLE_NIFI");
        orderedProperties.setProperty("nifi.security.ocsp.responder.url", "");
        orderedProperties.setProperty("nifi.security.ocsp.responder.certificate", "");

        orderedProperties.setProperty("nifi.cluster.is.node", "false", System.lineSeparator()
                + System.lineSeparator() + "# cluster node properties (only configure for cluster nodes) #");
        orderedProperties.setProperty("nifi.cluster.is.manager", "false",
                System.lineSeparator() + "# cluster manager properties (only configure for cluster manager) #");

        for (Map.Entry<String, String> entry : configSchema.getNifiPropertiesOverrides().entrySet()) {
            orderedProperties.setProperty(entry.getKey(), entry.getValue());
        }

        orderedProperties.store(outputStream, PROPERTIES_FILE_APACHE_2_0_LICENSE);
    } catch (NullPointerException e) {
        throw new ConfigurationChangeException(
                "Failed to parse the config YAML while creating the nifi.properties", e);
    } finally {
        outputStream.close();
    }
}

From source file:com.pearson.eidetic.driver.threads.subthreads.SnapshotVolumeSync.java
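Creates a snapshot of an EBS volume and copies its tags; each error log uses System.lineSeparator() to separate the exception message from the stack trace.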

public boolean snapshotCreation(AmazonEC2Client ec2Client, Volume vol, Date date) {
    if ((date == null) || (ec2Client == null) || (vol == null)) {
        return false;
    }

    try {

        Collection<Tag> tags_volume = getResourceTags(vol);

        String volumeAttachmentInstance = "none";
        try {
            volumeAttachmentInstance = vol.getAttachments().get(0).getInstanceId();
        } catch (Exception e) {
            logger.debug("Volume not attached to instance: " + vol.getVolumeId());
        }

        String description;
        if (Validator_) {
            description = "sync_snapshot " + vol.getVolumeId() + " called by Eidetic Validator Synchronizer at "
                    + date.toString() + ". Volume attached to " + volumeAttachmentInstance;
        } else {
            description = "sync_snapshot " + vol.getVolumeId() + " by Eidetic Synchronizer at "
                    + date.toString() + ". Volume attached to " + volumeAttachmentInstance;
        }

        Snapshot current_snap;
        try {
            current_snap = createSnapshotOfVolume(ec2Client, vol, description, numRetries_,
                    maxApiRequestsPerSecond_, uniqueAwsAccountIdentifier_);
        } catch (Exception e) {
            logger.error("awsAccountId=\"" + uniqueAwsAccountIdentifier_
                    + "\",Event=Error, Error=\"Malformed Eidetic Tag\", Volume_id=\"" + vol.getVolumeId()
                    + "\", stacktrace=\"" + e.toString() + System.lineSeparator()
                    + StackTrace.getStringFromStackTrace(e) + "\"");
            return false;
        }

        try {
            setResourceTags(ec2Client, current_snap, tags_volume, numRetries_, maxApiRequestsPerSecond_,
                    uniqueAwsAccountIdentifier_);
        } catch (Exception e) {
            logger.error("awsAccountId=\"" + uniqueAwsAccountIdentifier_
                    + "\",Event\"Error\", Error=\"error adding tags to snapshot\", Snapshot_id=\""
                    + current_snap.getVolumeId() + "\", stacktrace=\"" + e.toString() + System.lineSeparator()
                    + StackTrace.getStringFromStackTrace(e) + "\"");
            return false;
        }

    } catch (Exception e) {
        logger.error("awsAccountId=\"" + uniqueAwsAccountIdentifier_
                + "\",Event=\"Error, Error=\"error in snapshotCreation\", stacktrace=\"" + e.toString()
                + System.lineSeparator() + StackTrace.getStringFromStackTrace(e) + "\"");
        return false;
    }

    return true;
}

From source file:com.datamelt.nifi.processors.SplitToAttribute.java
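Splits FlowFile content into fields and strips any System.lineSeparator() occurrences from each field before storing it as a FlowFile attribute.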

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {

    // get selected number format for the field number
    String numberFormat = context.getProperty(FIELD_NUMBER_NUMBERFORMAT).getValue();

    // for formatting the number
    final DecimalFormat df;
    if (numberFormat != null && !numberFormat.trim().equals("")) {
        df = new DecimalFormat(context.getProperty(FIELD_NUMBER_NUMBERFORMAT).getValue());
    } else {
        df = new DecimalFormat();
    }

    // get the flowfile
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    session.read(flowFile, new InputStreamCallback() {
        public void process(InputStream in) throws IOException {
            try {

                // get the flow file content
                String row = IOUtils.toString(in, "UTF-8");

                // check that we have data
                if (row != null && !row.trim().equals("")) {
                    //put the information which field prefix was used to the map
                    propertyMap.put(PROPERTY_ATTRIBUTE_PREFIX_ATTRIBUTE_NAME,
                            context.getProperty(ATTRIBUTE_PREFIX).getValue());

                    // Split the row into separate fields using the FIELD_SEPARATOR property
                    String[] fields = row.split(context.getProperty(FIELD_SEPARATOR).getValue());

                    // loop over the fields
                    if (fields != null && fields.length > 0) {

                        for (int i = 0; i < fields.length; i++) {
                            if (fields[i] != null && !fields[i].trim().equals("")) {
                                String field = fields[i];
                                // remove any lineseparators
                                field = field.replace(System.lineSeparator(), "");

                                // put into the map of attributes
                                propertyMap.put(context.getProperty(ATTRIBUTE_PREFIX).getValue() + df.format(i),
                                        field);
                            }
                        }
                    }
                }
            } catch (Exception ex) {
                ex.printStackTrace();
                logger.error("Failed to split data into fields using seperator: ["
                        + context.getProperty(FIELD_SEPARATOR).getValue() + "]");
            }
        }
    });

    // put the map to the flowfile

    flowFile = session.putAllAttributes(flowFile, propertyMap);
    // for provenance
    session.getProvenanceReporter().modifyAttributes(flowFile);
    // transfer the flowfile
    session.transfer(flowFile, SUCCESS);
}