Example usage for org.apache.commons.lang.builder ToStringBuilder reflectionToString

List of usage examples for org.apache.commons.lang.builder ToStringBuilder reflectionToString

Introduction

On this page you can find an example usage for org.apache.commons.lang.builder ToStringBuilder reflectionToString.

Prototype

public static String reflectionToString(Object object, ToStringStyle style) 

Source Link

Document

Forwards to ReflectionToStringBuilder.

Usage

From source file:fr.dudie.nominatim.client.JsonNominatimClientTest.java

/**
 * Verifies that a plain free-text search returns a non-null, non-empty list of
 * addresses; each returned address is dumped to the debug log for inspection.
 */
@Test
public void testSearchWithResults() throws IOException {

    LOGGER.info("testSearchWithResults.start");

    final List<Address> results = nominatimClient.search("vitr, rennes");

    assertNotNull("result list is never null", results);
    for (final Address result : results) {
        LOGGER.debug(ToStringBuilder.reflectionToString(result, ToStringStyle.MULTI_LINE_STYLE));
    }
    final boolean hasResults = !results.isEmpty();
    assertTrue("list is not empty", hasResults);

    LOGGER.info("testSearchWithResults.end");
}

From source file:de.hybris.platform.b2bpunchoutaddon.controllers.pages.DefaultPunchOutController.java

/**
 * Renders the given punch-out session as a short, single string suitable for logging.
 * Nested members (initiator, target, sender, shipping address) are dumped via
 * reflection because they do not expose a useful toString of their own.
 *
 * @param punchoutSession the session to describe; must not be {@code null}
 * @return a SHORT_PREFIX_STYLE description of the session and its nested members
 */
private String printSessionInfo(final PunchOutSession punchoutSession) {

    return new ToStringBuilder(punchoutSession, ToStringStyle.SHORT_PREFIX_STYLE)
            .append("operation", punchoutSession.getOperation())
            .append("browserFormPostUrl", punchoutSession.getBrowserFormPostUrl())
            .append("buyerCookie", punchoutSession.getBuyerCookie())
            .append("time", punchoutSession.getTime())
            .append("initiatedBy", reflectionDump(punchoutSession.getInitiatedBy()))
            .append("targetedTo", reflectionDump(punchoutSession.getTargetedTo()))
            .append("sentBy", reflectionDump(punchoutSession.getSentBy()))
            .append("shippingAddress", reflectionDump(punchoutSession.getShippingAddress()))
            .toString();
}

/**
 * Shared helper for the repeated reflection dumps above: renders {@code value}
 * using SHORT_PREFIX_STYLE. Extracted to avoid repeating the same call four times.
 *
 * @param value the object to dump (reflectionToString tolerates {@code null})
 * @return the reflective SHORT_PREFIX_STYLE representation of {@code value}
 */
private static String reflectionDump(final Object value) {
    return ToStringBuilder.reflectionToString(value, ToStringStyle.SHORT_PREFIX_STYLE);
}

From source file:ee.ria.xroad.common.request.DummyCentralServiceHandler.java

/**
 * Builds a reflective, multi-line string representation of {@code o} for logging.
 *
 * @param o the object to render (reflectionToString tolerates {@code null})
 * @return the MULTI_LINE_STYLE reflective dump of {@code o}
 */
private static String toString(Object o) {
    final ToStringStyle style = ToStringStyle.MULTI_LINE_STYLE;
    return ToStringBuilder.reflectionToString(o, style);
}

From source file:fr.dudie.acrachilisync.handler.AcraToChiliprojectSyncHandler.java

/**
 * {@inheritDoc}/*from w  w  w  .  jav  a 2 s.co m*/
 * 
 * @see fr.dudie.acrachilisync.handler.AcraReportHandler#onKnownIssueNotSynchronized(fr.dudie.acrachilisync.model.AcraReport,
 *      org.redmine.ta.beans.Issue)
 */
@Override
public void onKnownIssueNotSynchronized(final AcraReport pReport, final Issue pIssue)
        throws SynchronizationException {

    IssueDescriptionReader reader = null;
    try {
        reader = new IssueDescriptionReader(pIssue);

    } catch (final IssueParseException e) {
        throw new SynchronizationException("Unable to parse description of issue " + pIssue.getId(), e);
    }

    final IssueDescriptionBuilder builder = new IssueDescriptionBuilder(reader.getStacktrace());
    builder.setOccurrences(reader.getOccurrences());
    builder.addOccurrence(IssueDescriptionUtils.toErrorOccurrence(pReport));

    pIssue.setDescription(builder.build());

    try {
        redmineClient.updateIssue(pIssue);
    } catch (final Exception e) {
        pReport.setStatus(SyncStatus.FAILURE);
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Failure while updating issue: \n{}",
                    ToStringBuilder.reflectionToString(pIssue, ToStringStyle.MULTI_LINE_STYLE));
        }
        throw new SynchronizationException(
                String.format("Unable to update issue %s for ACRA report %s", pIssue.getId(), pReport.getId()),
                e);
    }
    pReport.setStatus(SyncStatus.SUCCESS);
}

From source file:com.nhncorp.ips.common.bo.ResultSenderBOImpl.java

/**
 * Enriches every reported product of the request with seller details (id, shop name,
 * phone number resolved via the remote seller API) and with the reporting owner.
 *
 * NOTE(review): original comments were mojibake (likely Korean); they have been
 * replaced with English descriptions of what the code visibly does.
 *
 * @param request the work request whose product list is enriched in place
 */
private void setupDetailInfo(WorkRequest request) {
    List<ReportProduct> productList = request.getProductList();
    Seller seller;
    for (ReportProduct reportProduct : productList) {
        // When a presumed entry exists, prefer its seller over the product's own.
        if (reportProduct.getPresume() != null) {
            reportProduct.setSeller(reportProduct.getPresume().getSeller());
        }
        seller = reportProduct.getSeller();

        // Seller id missing: resolve it through the Product API by product id.
        if (seller.getId() == null) {
            RemoteSimpleProduct product = remoteSimpleProductService.get(reportProduct.getProductId());

            seller.setId(product.getSellerId());
            seller.setNo(product.getSellerNo());
            seller.setShopName(product.getSellerShopName());
        }

        logger.debug("#### {}  ? ?? ? {}", request.getWkResult().getName(), seller);

        // Fetch the full remote seller record to obtain type-specific contact details.
        RemoteSeller remoteSeller = remoteSellerService.findByLoginId(seller.getId());

        logger.debug("###  ? seller ? : {}",
                ToStringBuilder.reflectionToString(remoteSeller, ToStringStyle.MULTI_LINE_STYLE));

        // Hoist the type code: the original re-evaluated getType().getCode() six times.
        final String sellerTypeCode = remoteSeller.getType().getCode();
        if (StringUtils.equals(sellerTypeCode, SellerType.PERSONAL.getCode())
                || StringUtils.equals(sellerTypeCode, SellerType.DOMESTIC_PERSONAL.getCode())
                || StringUtils.equals(sellerTypeCode, SellerType.BUSINESS.getCode())
                || StringUtils.equals(sellerTypeCode, SellerType.DOMESTIC_BUSINESS.getCode())) {
            seller.setPhoneNumber(remoteSeller.getCellPhoneNumber());
        } else if (StringUtils.equals(sellerTypeCode, SellerType.OVERSEAS_PERSONAL.getCode())) {
            seller.setPhoneNumber(remoteSeller.getOverseasTelephoneNumber());
        } else if (StringUtils.equals(sellerTypeCode, SellerType.OVERSEAS_BUSINESS.getCode())) {
            // Overseas-business sellers expose their number on the BusinessSeller subtype.
            BusinessSeller businessSeller = (BusinessSeller) remoteSeller;
            seller.setPhoneNumber(businessSeller.getChargerOverseasTelephoneNumber());
        }

        // Attach the reporting owner to the product and the request.
        // NOTE(review): request.setOwner is overwritten on every iteration, so only the
        // last product's owner survives on the request — confirm this is intended.
        final Owner owner = ownerBO.findOwnerByReportedProduct(reportProduct);
        reportProduct.setReportingOwner(owner);
        request.setOwner(owner);
    }

}

From source file:fr.dudie.nominatim.client.JsonNominatimClientTest.java

/**
 * Verifies that a search whose results include a long (64-bit) place id is parsed
 * without error and yields a non-empty address list.
 */
@Test
public void testSearchWithForLongPlaceId() throws IOException {

    // Fixed copy-paste defect: the log markers previously said
    // "testSearchWithResults", making traces from the two tests indistinguishable.
    LOGGER.info("testSearchWithForLongPlaceId.start");

    final List<Address> addresses = nominatimClient
            .search(". ,37, ?--");

    assertNotNull("result list is not null", addresses);
    for (final Address address : addresses) {
        LOGGER.debug(ToStringBuilder.reflectionToString(address, ToStringStyle.MULTI_LINE_STYLE));
    }
    assertTrue("list is not empty", !addresses.isEmpty());

    LOGGER.info("testSearchWithForLongPlaceId.end");
}

From source file:gov.nih.nci.coppa.services.client.ClientUtils.java

/**
 * Prints a reflective, multi-line dump of {@code result} to standard output.
 *
 * @param <E> the type of the value to print
 * @param result the value to dump (reflectionToString tolerates {@code null})
 */
private static <E> void printString(E result) {
    final String dump = ToStringBuilder.reflectionToString(result, ToStringStyle.MULTI_LINE_STYLE);
    System.out.println(dump);
}

From source file:com.gemini.provision.network.openstack.NetworkProviderOpenStackImpl.java

/**
 * Retrieves all networks (and their subnets) from the OpenStack installation and maps
 * them onto Gemini model objects. A network that matches neither an application network
 * nor an existing orphan is created as a new orphan network on the environment.
 *
 * @param tenant the tenant whose name/credentials are used to authenticate
 * @param env the environment providing endpoint, admin credentials and model lists
 * @return the mapped networks, or {@code null} if authentication failed
 */
@Override
public List<GeminiNetwork> getNetworks(GeminiTenant tenant, GeminiEnvironment env) {
    //authenticate the session with the OpenStack installation
    OSClient os = OSFactory.builder().endpoint(env.getEndPoint())
            .credentials(env.getAdminUserName(), env.getAdminPassword()).tenantName(tenant.getName())
            .authenticate();
    if (os == null) {
        Logger.error("Failed to authenticate Tenant: {}",
                ToStringBuilder.reflectionToString(tenant, ToStringStyle.MULTI_LINE_STYLE));
        return null;
    }

    //get all the networks (original comment said "subnets", which was misleading)
    List<? extends Network> networks = os.networking().network().list();
    //diamond operator instead of the original raw-typed `new ArrayList()`
    List<GeminiNetwork> gemNetworks = new ArrayList<>();

    //map the list of network gateways and their subnets to gemini equivalents
    networks.stream().forEach(osn -> {
        GeminiNetwork gn = null;
        try {
            //first see if this network belongs to an application
            gn = env.getApplications().stream().map(GeminiApplication::getNetworks).flatMap(List::stream) //invoke the getNetworks on each application and convert the result into one large stream
                    .filter(n -> n.getCloudID().equals(osn.getId())) //filter on the OpenStack network object cloud id
                    .findFirst().get();
            gn.setCloudID(osn.getId()); //in the event the ID has not been updated
        } catch (NoSuchElementException | NullPointerException e) {
            //not part of an application, see if it is in the orphan list
            try {
                gn = env.getOrphanNetworks().stream().filter(n -> n.getCloudID().equals(osn.getId())) //filter on the OpenStack network object cloud id
                        .findFirst().get();
            } catch (NoSuchElementException | NullPointerException ex) {
                //not an error, just log the event. the network object will be created below
                Logger.debug("Network {} not mapped in Gemini models, creating one...", osn.getName());
            }
        }

        GeminiNetwork newGn = null;
        if (gn == null) {
            newGn = new GeminiNetwork();
            newGn.setName(osn.getName());
            newGn.setCloudID(osn.getId());
            if (osn.getNetworkType() != null) {
                newGn.setNetworkType(osn.getNetworkType().name());
            }
            //we don't know which application this network belongs to... so add it to orphan networks list
            env.addOrphanNetwork(newGn);
            gn = newGn;
        }

        //add the subnets to the new network. For some reason Network::getNeutronSubnets
        //always returned null. List all subnets and filter by the parent network id
        List<? extends Subnet> osSubnets = os.networking().subnet().list();
        if (osSubnets != null && !osSubnets.isEmpty()
                && osSubnets.stream().anyMatch(osSubnet -> osSubnet.getNetworkId().equals(osn.getId()))) {
            GeminiNetwork tmpParent = newGn == null ? gn : newGn;
            osSubnets.stream().filter(osSubnet -> osSubnet.getNetworkId().equals(osn.getId()))
                    .forEach(osSubnet -> {
                        if (tmpParent.getSubnets().stream()
                                .noneMatch(s -> s.getName().equals(osSubnet.getName()))) {
                            //this subnet is not mapped on the Gemini side
                            GeminiSubnet geminiSubnet = new GeminiSubnet();
                            geminiSubnet.setCloudID(osSubnet.getId());
                            //fixed: the original called setParent(tmpParent) twice; once is enough
                            geminiSubnet.setParent(tmpParent);
                            geminiSubnet.setCidr(osSubnet.getCidr());
                            geminiSubnet.setName(osSubnet.getName());
                            geminiSubnet.setEnableDHCP(osSubnet.isDHCPEnabled());
                            geminiSubnet.setNetworkType(
                                    osSubnet.getIpVersion() == IPVersionType.V4 ? IPAddressType.IPv4
                                            : IPAddressType.IPv6);
                            //map each allocation pool onto a Gemini allocation pool object
                            osSubnet.getAllocationPools().stream().forEach(ap -> {
                                GeminiSubnetAllocationPool geminiAp = new GeminiSubnetAllocationPool(
                                        InetAddresses.forString(ap.getStart()),
                                        InetAddresses.forString(ap.getEnd()));
                                geminiAp.setParent(geminiSubnet);
                                geminiSubnet.addAllocationPool(geminiAp);
                            });
                            tmpParent.addSubnet(geminiSubnet);
                        }
                    });
        }
        gemNetworks.add(gn);
    });
    return gemNetworks;
}

From source file:edu.harvard.iq.dataverse.rserve.RemoteDataFrameService.java

/**
 * Executes the R job: connects to Rserve, transfers the tabular data file, builds an
 * R data frame (variable names, labels, formats, value tables), saves it in the
 * requested format, and transfers the result file back.
 *
 * Fix over the original: {@code result.put("dataFrameFileName", ...)} was executed
 * BEFORE the {@code localDataFrameFile != null} check, so a null transfer result
 * caused an NPE and the null check was dead code. The put now happens inside the
 * non-null branch.
 *
 * @param sro the R job request describing the tabular file and variable metadata
 * @return a result map; contains "dataFrameFileName" and "Rversion" on success, or
 *         "RexecError"="true" on any failure
 */
public Map<String, String> execute(RJobRequest sro) {
    dbgLog.fine("RemoteDataFrameService: execute() starts here.");

    // set the return object
    Map<String, String> result = new HashMap<String, String>();

    try {
        // TODO: 
        // Split the code below into neat individual methods - for 
        // initializing the connection, loading the remote libraries, 
        // creating remote R vectors for the parameters that will be used 
        // to create the data frame - variable names, labels, etc., and 
        // executing the main request and any necessary post-processing
        // -- L.A. 4.0 alpha 1

        // Set up an Rserve connection
        dbgLog.fine("sro dump:\n" + ToStringBuilder.reflectionToString(sro, ToStringStyle.MULTI_LINE_STYLE));

        dbgLog.fine("RSERVE_USER=" + RSERVE_USER + "[default=rserve]");
        dbgLog.fine("RSERVE_PASSWORD=" + RSERVE_PWD + "[default=rserve]");
        dbgLog.fine("RSERVE_PORT=" + RSERVE_PORT + "[default=6311]");
        dbgLog.fine("RSERVE_HOST=" + RSERVE_HOST);

        RConnection c = new RConnection(RSERVE_HOST, RSERVE_PORT);

        c.login(RSERVE_USER, RSERVE_PWD);
        dbgLog.info(">" + c.eval("R.version$version.string").asString() + "<");

        // check working directories
        // This needs to be done *before* we try to create any files 
        // there!
        setupWorkingDirectory(c);

        // send the data file to the Rserve side:

        String infile = sro.getTabularDataFileName();
        InputStream inb = new BufferedInputStream(new FileInputStream(infile));

        int bufsize;
        byte[] bffr = new byte[1024];

        RFileOutputStream os = c.createFile(tempFileNameIn);
        while ((bufsize = inb.read(bffr)) != -1) {
            os.write(bffr, 0, bufsize);
        }
        os.close();
        inb.close();

        // Rserve code starts here
        dbgLog.fine("wrkdir=" + RSERVE_TMP_DIR);

        String RversionLine = "R.Version()$version.string";
        String Rversion = c.eval(RversionLine).asString();

        // We need to initialize our R session:
        // send custom R code library over to the Rserve and load the code:

        String rscript = readLocalResource(DATAVERSE_R_FUNCTIONS);
        c.voidEval(rscript);

        dbgLog.fine("raw variable type=" + sro.getVariableTypes());
        c.assign("vartyp", new REXPInteger(sro.getVariableTypes()));
        String[] tmpt = c.eval("vartyp").asStrings();
        dbgLog.fine("vartyp length=" + tmpt.length + "\t " + StringUtils.join(tmpt, ","));

        // variable *formats* - not to be confused with variable *types*!
        // these specify extra, optional format specifications - for example, 
        // String variables may represent date and time values. 

        Map<String, String> tmpFmt = sro.getVariableFormats();

        dbgLog.fine("tmpFmt=" + tmpFmt);

        // In the fragment below we create an R list varFrmt storing 
        // these format specifications: 

        if (tmpFmt != null) {
            Set<String> vfkeys = tmpFmt.keySet();
            String[] tmpfk = (String[]) vfkeys.toArray(new String[vfkeys.size()]);
            String[] tmpfv = getValueSet(tmpFmt, tmpfk);
            c.assign("tmpfk", new REXPString(tmpfk));
            c.assign("tmpfv", new REXPString(tmpfv));
            String fmtNamesLine = "names(tmpfv)<- tmpfk";
            c.voidEval(fmtNamesLine);
            String fmtValuesLine = "varFmt<- as.list(tmpfv)";
            c.voidEval(fmtValuesLine);
        } else {
            String[] varFmtN = {};
            List<String> varFmtV = new ArrayList<String>();
            c.assign("varFmt", new REXPList(new RList(varFmtV, varFmtN)));
        }

        // Variable names:
        String[] jvnamesRaw = sro.getVariableNames();
        String[] jvnames = null;

        if (sro.hasUnsafeVariableNames) {
            // create  list
            jvnames = sro.safeVarNames;
            dbgLog.fine("renamed=" + StringUtils.join(jvnames, ","));
        } else {
            jvnames = jvnamesRaw;
        }

        c.assign("vnames", new REXPString(jvnames));

        // confirm:

        String[] tmpjvnames = c.eval("vnames").asStrings();
        dbgLog.fine("vnames:" + StringUtils.join(tmpjvnames, ","));

        // read.dataverseTabData method, from dataverse_r_functions.R, 
        // uses R's standard scan() function to read the tabular data we've 
        // just transfered over and turn it into a dataframe. It adds some 
        // custom post-processing too - restores missing values, converts 
        // strings representing dates and times into R date and time objects, 
        // and more. 

        // Parameters for the read.dataverseTabData method executed on the R side:

        // file -> tempFileName
        // col.names -> Arrays.deepToString(new REXPString(jvnames)).asStrings())
        // colClassesx -> Arrays.deepToString((new REXPInteger(sro.getVariableTypes())).asStrings())
        // varFormat -> Arrays.deepToString((new REXPString(getValueSet(tmpFmt, tmpFmt.keySet().toArray(new String[tmpFmt.keySet().size()])))).asStrings())

        dbgLog.fine("read.dataverseTabData parameters:");
        dbgLog.fine("col.names = " + Arrays.deepToString((new REXPString(jvnames)).asStrings()));
        dbgLog.fine(
                "colClassesx = " + Arrays.deepToString((new REXPInteger(sro.getVariableTypes())).asStrings()));
        dbgLog.fine("varFormat = " + Arrays.deepToString((new REXPString(
                getValueSet(tmpFmt, tmpFmt.keySet().toArray(new String[tmpFmt.keySet().size()]))))
                        .asStrings()));

        String readtableline = "x<-read.dataverseTabData(file='" + tempFileNameIn
                + "', col.names=vnames, colClassesx=vartyp, varFormat=varFmt )";
        dbgLog.fine("readtable=" + readtableline);

        c.voidEval(readtableline);

        if (sro.hasUnsafeVariableNames) {
            dbgLog.fine("unsafeVariableNames exist");
            jvnames = sro.safeVarNames;
            String[] rawNameSet = sro.renamedVariableArray;
            String[] safeNameSet = sro.renamedResultArray;

            c.assign("tmpRN", new REXPString(rawNameSet));
            c.assign("tmpSN", new REXPString(safeNameSet));

            String raw2safevarNameTableLine = "names(tmpRN)<- tmpSN";
            c.voidEval(raw2safevarNameTableLine);
            String attrRsafe2rawLine = "attr(x, 'Rsafe2raw')<- as.list(tmpRN)";
            c.voidEval(attrRsafe2rawLine);
        } else {
            String attrRsafe2rawLine = "attr(x, 'Rsafe2raw')<-list();";
            c.voidEval(attrRsafe2rawLine);
        }

        // Restore NAs (missign values) in the data frame:
        // (these are encoded as empty strings in dataverse tab files)
        // Why are we doing it here? And not in the dataverse_r_functions.R 
        // fragment? 

        String asIsline = "for (i in 1:dim(x)[2]){ " + "if (attr(x,'var.type')[i] == 0) {"
                + "x[[i]]<-I(x[[i]]);  x[[i]][ x[[i]] == '' ]<-NA  }}";
        c.voidEval(asIsline);

        String[] varLabels = sro.getVariableLabels();

        c.assign("varlabels", new REXPString(varLabels));

        String attrVarLabelsLine = "attr(x, 'var.labels')<-varlabels";
        c.voidEval(attrVarLabelsLine);

        // Confirm:
        String[] vlbl = c.eval("attr(x, 'var.labels')").asStrings();
        dbgLog.fine("varlabels=" + StringUtils.join(vlbl, ","));

        // create the VALTABLE and VALORDER lists:
        c.voidEval("VALTABLE<-list()");
        c.voidEval("VALORDER<-list()");

        //In the fragment below, we'll populate the VALTABLE list that we've
        // just created with the actual values and labels of our categorical varaibles.
        // TODO: 
        // This code has been imported from the DVN v2-3
        // implementation. I keep wondering if there is a simpler way to
        // achive this - to pass these maps of values and labels to R 
        // in fewer steps/with less code - ?
        // -- L.A. 4.3

        Map<String, Map<String, String>> vltbl = sro.getValueTable();
        Map<String, List<String>> orderedCategoryValues = sro.getCategoryValueOrders();
        String[] variableIds = sro.getVariableIds();

        for (int j = 0; j < variableIds.length; j++) {
            // if this variable has a value-label table,
            // pass its key and value arrays to Rserve;
            // finalize a value-table on the Rserve side:

            String varId = variableIds[j];

            if (vltbl.containsKey(varId)) {

                Map<String, String> tmp = (HashMap<String, String>) vltbl.get(varId);
                Set<String> vlkeys = tmp.keySet();
                String[] tmpk = (String[]) vlkeys.toArray(new String[vlkeys.size()]);
                String[] tmpv = getValueSet(tmp, tmpk);

                dbgLog.fine("tmp:k=" + StringUtils.join(tmpk, ","));
                dbgLog.fine("tmp:v=" + StringUtils.join(tmpv, ","));

                // index number starts from 1(not 0):
                int indx = j + 1;
                dbgLog.fine("index=" + indx);

                if (tmpv.length > 0) {

                    c.assign("tmpk", new REXPString(tmpk));

                    c.assign("tmpv", new REXPString(tmpv));

                    String namesValueLine = "names(tmpv)<- tmpk";
                    c.voidEval(namesValueLine);

                    String sbvl = "VALTABLE[['" + Integer.toString(indx) + "']]" + "<- as.list(tmpv)";
                    dbgLog.fine("frag=" + sbvl);
                    c.voidEval(sbvl);

                    // confirmation test for j-th variable name
                    REXP jl = c.parseAndEval(sbvl);
                    dbgLog.fine("jl(" + j + ") = " + jl);
                }
            }

            // If this is an ordered categorical value (and that means,
            // it was produced from an ordered factor, from an ingested 
            // R data frame, since no other formats we support have 
            // ordered categoricals), we'll also supply a list of these
            // ordered values:

            if (orderedCategoryValues != null && orderedCategoryValues.containsKey(varId)) {
                int indx = j + 1;
                List<String> orderList = orderedCategoryValues.get(varId);
                if (orderList != null) {
                    String[] ordv = (String[]) orderList.toArray(new String[orderList.size()]);
                    dbgLog.fine("ordv=" + StringUtils.join(ordv, ","));
                    c.assign("ordv", new REXPString(ordv));
                    String sbvl = "VALORDER[['" + Integer.toString(indx) + "']]" + "<- as.list(ordv)";
                    dbgLog.fine("VALORDER[...]=" + sbvl);
                    c.voidEval(sbvl);
                } else {
                    dbgLog.fine("NULL orderedCategoryValues list.");
                }
            }
        }

        // And now we store the VALTABLE and MSVLTBL as attributes of the 
        // dataframe we are cooking:
        dbgLog.fine("length of vl=" + c.eval("length(VALTABLE)").asInteger());
        String attrValTableLine = "attr(x, 'val.table')<-VALTABLE";
        c.voidEval(attrValTableLine);

        String msvStartLine = "MSVLTBL<-list();";
        c.voidEval(msvStartLine);
        String attrMissvalLine = "attr(x, 'missval.table')<-MSVLTBL";
        c.voidEval(attrMissvalLine);

        // But we are not done, with these value label maps... We now need
        // to call these methods from the dataverse_r_functions.R script
        // to further process the lists. Among other things, they will 
        // create these new lists - value index and missing value index, that 
        // simply indicate which variables have any of the above; these will 
        // also be saved as attributes of the data frame, val.index and 
        // missval.index respectively. But, also, the methods will reprocess
        // and overwite the val.table and missval.table attributes already stored in 
        // the dataframe. I don't fully understand why that is necessary, or what it is
        // that we are actually adding to the lists there... Another TODO: ? 

        String createVIndexLine = "x<-createvalindex(dtfrm=x, attrname='val.index');";
        c.voidEval(createVIndexLine);
        String createMVIndexLine = "x<-createvalindex(dtfrm=x, attrname='missval.index');";
        c.voidEval(createMVIndexLine);

        // And now we'll call the last method from the R script - createDataverseDataFrame();
        // It should probably be renamed. The dataframe has already been created. 
        // what this method does, it goes through the frame, and changes the 
        // vectors representing categorical variables to R factors. 
        // For example, if this tabular file was produced from a Stata file 
        // that had a categorical in which "Male" and "Female" were represented 
        // with 0 and 1. In the Dataverse datbase, the string values "Male" and 
        // "Female" are now stored as "categorical value labels". And the column 
        // in the tab file has numeric 1 and 0s. That's what the R
        // dataframe was created from, so it now has a numeric vector of 1s and 0s
        // representing this variable. So in this step we are going 
        // to change this vector into a factor, using the labels and values 
        // that we already passed over via Rserve and stored in the val.table, above. 

        // TODO: 
        // I'm going to propose that we go back to what we used to do back in 
        // DVN 2-3.* - instead of giving the user a single dataframe (.RData) 
        // file, provide a zip file, with the data frame, and also a README 
        // file with some documentation explaining how the data frame was 
        // created, and pointing out some potential issues stemming from the 
        // conversion between formats. Converting Stata categoricals into 
        // R factors is one of such issues (if nothing else, do note that 
        // the UNF of the datafile with the column described in the example 
        // above will change, if the resulting R dataframe is reingested! See 
        // the UNF documentation for more info...). We may also make this 
        // download interactive - giving the user some options for how 
        // to handle the conversion (so, another choice would be to convert 
        // the above to a factor of "0" and "1"s), etc. 
        // -- L.A. 4.3

        String dataFileName = "Data." + PID + "." + sro.getFormatRequested();

        // data file to be copied back to the dvn
        String dsnprfx = RSERVE_TMP_DIR + "/" + dataFileName;

        String dataverseDataFrameCommand = "createDataverseDataFrame(dtfrm=x," + "dwnldoptn='"
                + sro.getFormatRequested() + "'" + ", dsnprfx='" + dsnprfx + "')";

        c.voidEval(dataverseDataFrameCommand);

        int wbFileSize = getFileSize(c, dsnprfx);

        dbgLog.fine("wbFileSize=" + wbFileSize);

        // If the above succeeded, the dataframe has been saved on the 
        // Rserve side as an .Rdata file. Now we can transfer it back to the
        // dataverse side:

        File localDataFrameFile = transferRemoteFile(c, dsnprfx, RWRKSP_FILE_PREFIX, "RData", wbFileSize);

        // BUGFIX: the put() used to run before this null check, so a null
        // transfer result caused an NPE and the else branch was unreachable.
        if (localDataFrameFile != null) {
            result.put("dataFrameFileName", localDataFrameFile.getAbsolutePath());
            dbgLog.fine("data frame file name: " + localDataFrameFile.getAbsolutePath());
        } else {
            dbgLog.fine("data frame file is null!");
            // throw an exception??
        }

        result.put("Rversion", Rversion);

        dbgLog.fine("result object (before closing the Rserve):\n" + result);

        String deleteLine = "file.remove('" + tempFileNameIn + "')";
        c.eval(deleteLine);

        c.close();

    } catch (RserveException rse) {
        // RserveException (Rserve is not running maybe?)
        // TODO: *ABSOLUTELY* need more diagnostics here!
        rse.printStackTrace();
        result.put("RexecError", "true");
        return result;

    } catch (REXPMismatchException mme) {
        mme.printStackTrace();
        result.put("RexecError", "true");
        return result;

    } catch (IOException ie) {
        ie.printStackTrace();
        result.put("RexecError", "true");
        return result;

    } catch (Exception ex) {
        ex.printStackTrace();
        result.put("RexecError", "true");
        return result;
    }

    return result;

}

From source file:com.sonatype.security.ldap.api.MarshalUnmarchalTest.java

/**
 * Produces a reflective, multi-line string representation of {@code obj} for
 * use in debug output.
 *
 * @param obj the object to render (reflectionToString tolerates {@code null})
 * @return the MULTI_LINE_STYLE reflective dump of {@code obj}
 */
private String toDebugString(Object obj) {
    final ToStringStyle style = ToStringStyle.MULTI_LINE_STYLE;
    return ToStringBuilder.reflectionToString(obj, style);
}