Example usage for java.math BigInteger ZERO

Introduction

On this page you can find examples showing how the java.math.BigInteger.ZERO constant is used in real-world source code.

Prototype

public static final BigInteger ZERO

Document

The BigInteger constant zero.
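
Before the examples, here is a minimal, self-contained sketch (not taken from any of the sources below) of the most common ways BigInteger.ZERO is used:

import java.math.BigInteger;

public class BigIntegerZeroDemo {
    public static void main(String[] args) {
        // ZERO is the additive identity
        BigInteger sum = BigInteger.ZERO.add(BigInteger.valueOf(42));
        System.out.println(sum); // 42

        // compare by value with equals() or compareTo(), never with ==
        BigInteger parsed = new BigInteger("0");
        System.out.println(parsed.equals(BigInteger.ZERO));         // true
        System.out.println(parsed.compareTo(BigInteger.ZERO) == 0); // true

        // signum() is an allocation-free sign test: -1, 0, or 1
        System.out.println(parsed.signum() == 0); // true
    }
}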

Usage

From source file:be.fedict.eid.pkira.blm.model.contracts.ContractRepositoryBean.java

private void addQueryParameters(String userRrn, CertificatesFilter certificatesFilter, Query query) {
    query.setParameter("nationalRegisterNumber", userRrn);
    query.setParameter("registrationStatus", RegistrationStatus.APPROVED);
    if (certificatesFilter.getCertificateDomainId() != null)
        query.setParameter("certificateDomainId", certificatesFilter.getCertificateDomainId());
    if (isNotBlank(certificatesFilter.getDistinguishedName()))
        query.setParameter("distinguishedName", certificatesFilter.getDistinguishedName());
    if (isNotBlank(certificatesFilter.getIssuer()))
        query.setParameter("issuer", certificatesFilter.getIssuer());
    if (isNotBlank(certificatesFilter.getSerialNumber())) {
        BigInteger serialNumber;
        try {
            serialNumber = new BigInteger(certificatesFilter.getSerialNumber());
        } catch (NumberFormatException e) {
            // non-numeric filter input: fall back to ZERO so the query still binds a value
            serialNumber = BigInteger.ZERO;
        }
        query.setParameter("serialNumber", serialNumber);
    }
    if (certificatesFilter.getCertificateType() != null) {
        query.setParameter("certificateType", certificatesFilter.getCertificateType());
    }
    if (certificatesFilter.getValidityStartFrom() != null)
        query.setParameter("validityStartFrom", certificatesFilter.getValidityStartFrom());
    if (certificatesFilter.getValidityStartTo() != null)
        query.setParameter("validityStartTo", certificatesFilter.getValidityStartTo());
    if (certificatesFilter.getValidityEndFrom() != null)
        query.setParameter("validityEndFrom", certificatesFilter.getValidityEndFrom());
    if (certificatesFilter.getValidityEndTo() != null)
        query.setParameter("validityEndTo", certificatesFilter.getValidityEndTo());
}

From source file:jp.aegif.nemaki.cmis.aspect.impl.ExceptionServiceImpl.java

@Override
public void invalidArgumentDepth(BigInteger depth) {
    // compare BigIntegers by value; == tests reference identity and is unreliable here
    if (BigInteger.ZERO.equals(depth)) {
        invalidArgument("Depth must not be zero");
    } else if (depth != null && depth.compareTo(BigInteger.valueOf(-1)) < 0) {
        invalidArgument("Depth must not be less than -1");
    }
}

From source file:org.apache.chemistry.opencmis.client.bindings.spi.http.AbstractApacheClientHttpInvoker.java

protected Response invoke(UrlBuilder url, String method, String contentType, Map<String, String> headers,
        final Output writer, final BindingSession session, BigInteger offset, BigInteger length) {
    int respCode = -1;

    try {
        // log before connect
        if (LOG.isDebugEnabled()) {
            LOG.debug("Session {}: {} {}", session.getSessionId(), method, url);
        }

        // get HTTP client object from session
        DefaultHttpClient httpclient = (DefaultHttpClient) session.get(HTTP_CLIENT);
        if (httpclient == null) {
            session.writeLock();
            try {
                httpclient = (DefaultHttpClient) session.get(HTTP_CLIENT);
                if (httpclient == null) {
                    httpclient = createHttpClient(url, session);
                    session.put(HTTP_CLIENT, httpclient, true);
                }
            } finally {
                session.writeUnlock();
            }
        }

        HttpRequestBase request = null;

        if ("GET".equals(method)) {
            request = new HttpGet(url.toString());
        } else if ("POST".equals(method)) {
            request = new HttpPost(url.toString());
        } else if ("PUT".equals(method)) {
            request = new HttpPut(url.toString());
        } else if ("DELETE".equals(method)) {
            request = new HttpDelete(url.toString());
        } else {
            throw new CmisRuntimeException("Invalid HTTP method!");
        }

        // set content type
        if (contentType != null) {
            request.setHeader("Content-Type", contentType);
        }
        // set other headers
        if (headers != null) {
            for (Map.Entry<String, String> header : headers.entrySet()) {
                request.addHeader(header.getKey(), header.getValue());
            }
        }

        // authenticate
        AuthenticationProvider authProvider = CmisBindingsHelper.getAuthenticationProvider(session);
        if (authProvider != null) {
            Map<String, List<String>> httpHeaders = authProvider.getHTTPHeaders(url.toString());
            if (httpHeaders != null) {
                for (Map.Entry<String, List<String>> header : httpHeaders.entrySet()) {
                    if (header.getKey() != null && isNotEmpty(header.getValue())) {
                        String key = header.getKey();
                        if (key.equalsIgnoreCase("user-agent")) {
                            request.setHeader("User-Agent", header.getValue().get(0));
                        } else {
                            for (String value : header.getValue()) {
                                if (value != null) {
                                    request.addHeader(key, value);
                                }
                            }
                        }
                    }
                }
            }
        }

        // range
        if ((offset != null) || (length != null)) {
            StringBuilder sb = new StringBuilder("bytes=");

            if ((offset == null) || (offset.signum() == -1)) {
                offset = BigInteger.ZERO;
            }

            sb.append(offset.toString());
            sb.append('-');

            if ((length != null) && (length.signum() == 1)) {
                sb.append(offset.add(length.subtract(BigInteger.ONE)).toString());
            }

            request.setHeader("Range", sb.toString());
        }

        // compression
        Object compression = session.get(SessionParameter.COMPRESSION);
        if ((compression != null) && Boolean.parseBoolean(compression.toString())) {
            request.setHeader("Accept-Encoding", "gzip,deflate");
        }

        // locale
        if (session.get(CmisBindingsHelper.ACCEPT_LANGUAGE) instanceof String) {
            request.setHeader("Accept-Language", session.get(CmisBindingsHelper.ACCEPT_LANGUAGE).toString());
        }

        // send data
        if (writer != null) {
            Object clientCompression = session.get(SessionParameter.CLIENT_COMPRESSION);
            final boolean clientCompressionFlag = (clientCompression != null)
                    && Boolean.parseBoolean(clientCompression.toString());
            if (clientCompressionFlag) {
                request.setHeader("Content-Encoding", "gzip");
            }

            AbstractHttpEntity streamEntity = new AbstractHttpEntity() {
                @Override
                public boolean isChunked() {
                    return true;
                }

                @Override
                public boolean isRepeatable() {
                    return false;
                }

                @Override
                public long getContentLength() {
                    return -1;
                }

                @Override
                public boolean isStreaming() {
                    return false;
                }

                @Override
                public InputStream getContent() throws IOException {
                    throw new UnsupportedOperationException();
                }

                @Override
                public void writeTo(final OutputStream outstream) throws IOException {
                    OutputStream connOut = null;

                    if (clientCompressionFlag) {
                        connOut = new GZIPOutputStream(outstream, 4096);
                    } else {
                        connOut = outstream;
                    }

                    OutputStream out = new BufferedOutputStream(connOut, BUFFER_SIZE);
                    try {
                        writer.write(out);
                    } catch (IOException ioe) {
                        throw ioe;
                    } catch (Exception e) {
                        throw new IOException(e);
                    }
                    out.flush();

                    if (connOut instanceof GZIPOutputStream) {
                        ((GZIPOutputStream) connOut).finish();
                    }
                }
            };
            ((HttpEntityEnclosingRequestBase) request).setEntity(streamEntity);
        }

        // connect
        HttpResponse response = httpclient.execute(request);
        HttpEntity entity = response.getEntity();

        // get stream, if present
        respCode = response.getStatusLine().getStatusCode();
        InputStream inputStream = null;
        InputStream errorStream = null;

        if ((respCode == 200) || (respCode == 201) || (respCode == 203) || (respCode == 206)) {
            if (entity != null) {
                inputStream = entity.getContent();
            } else {
                inputStream = new ByteArrayInputStream(new byte[0]);
            }
        } else {
            if (entity != null) {
                errorStream = entity.getContent();
            } else {
                errorStream = new ByteArrayInputStream(new byte[0]);
            }
        }

        // collect headers
        Map<String, List<String>> responseHeaders = new HashMap<String, List<String>>();
        for (Header header : response.getAllHeaders()) {
            List<String> values = responseHeaders.get(header.getName());
            if (values == null) {
                values = new ArrayList<String>();
                responseHeaders.put(header.getName(), values);
            }
            values.add(header.getValue());
        }

        // log after connect
        if (LOG.isTraceEnabled()) {
            LOG.trace("Session {}: {} {} > Headers: {}", session.getSessionId(), method, url,
                    responseHeaders.toString());
        }

        // forward response HTTP headers
        if (authProvider != null) {
            authProvider.putResponseHeaders(url.toString(), respCode, responseHeaders);
        }

        // get the response
        return new Response(respCode, response.getStatusLine().getReasonPhrase(), responseHeaders, inputStream,
                errorStream);
    } catch (Exception e) {
        String status = (respCode > 0 ? " (HTTP status code " + respCode + ")" : "");
        throw new CmisConnectionException("Cannot access \"" + url + "\"" + status + ": " + e.getMessage(), e);
    }
}
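
Note how the Range header is assembled above: HTTP byte ranges are inclusive on both ends, so the last byte index is offset + length - 1. A condensed sketch of just that logic, with buildRangeHeader as a hypothetical stand-in (it is not part of OpenCMIS):

static String buildRangeHeader(BigInteger offset, BigInteger length) {
    if ((offset == null) || (offset.signum() == -1)) {
        offset = BigInteger.ZERO; // a missing or negative offset defaults to the start
    }
    StringBuilder sb = new StringBuilder("bytes=").append(offset).append('-');
    if ((length != null) && (length.signum() == 1)) {
        // inclusive upper bound: offset + length - 1
        sb.append(offset.add(length.subtract(BigInteger.ONE)));
    }
    return sb.toString();
}

For example, buildRangeHeader(BigInteger.ZERO, BigInteger.valueOf(100)) yields "bytes=0-99".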

From source file:io.ecarf.core.cloud.task.processor.reason.phase2.DoReasonTask7.java

@Override
public void run() throws IOException {

    GoogleCloudService cloud = (GoogleCloudService) this.getCloudService();

    Stopwatch stopwatch1 = Stopwatch.createUnstarted();
    Stopwatch stopwatch2 = Stopwatch.createUnstarted();
    Set<String> termsSet;

    if (terms == null) {
        // too large, probably saved as a file

        log.info("Using json file for terms: " + termsFile);
        Validate.notNull(termsFile);

        String localTermsFile = Utils.TEMP_FOLDER + termsFile;
        cloud.downloadObjectFromCloudStorage(termsFile, localTermsFile, bucket);

        // convert from JSON
        termsSet = io.cloudex.framework.utils.FileUtils.jsonFileToSet(localTermsFile);

    } else {
        termsSet = ObjectUtils.csvToSet(terms);
    }

    String localSchemaFile = Utils.TEMP_FOLDER + schemaFile;
    // download the file from the cloud storage
    cloud.downloadObjectFromCloudStorage(schemaFile, localSchemaFile, bucket);

    // uncompress if compressed
    if (GzipUtils.isCompressedFilename(schemaFile)) {
        localSchemaFile = GzipUtils.getUncompressedFilename(localSchemaFile);
    }

    Map<Long, Set<Triple>> allSchemaTriples = TripleUtils.getRelevantSchemaETriples(localSchemaFile,
            TermUtils.RDFS_TBOX);

    // get all the triples we care about
    schemaTerms = new HashMap<>();

    for (String termStr : termsSet) {

        Long term = Long.parseLong(termStr);

        if (allSchemaTriples.containsKey(term)) {
            schemaTerms.put(term, allSchemaTriples.get(term));
        }
    }

    String decoratedTable = table;
    int emptyRetries = 0;
    int totalInferredTriples = 0;
    int maxRetries = Config.getIntegerProperty(Constants.REASON_RETRY_KEY, 6);
    String instanceId = cloud.getInstanceId();

    QueryGenerator<Long> generator = new QueryGenerator<Long>(schemaTerms, null);

    // timestamp loop
    do {

        Set<Long> productiveTerms = new HashSet<>();
        int interimInferredTriples = 0;

        // First of all run all the queries asynchronously and remember the jobId and filename for each term

        List<QueryResult> queryResults = new ArrayList<QueryResult>();
        generator.setDecoratedTable(decoratedTable);

        List<String> queries = generator.getQueries();
        log.debug("Generated Queries: " + queries);
        String queryResultFilePrefix = Utils.TEMP_FOLDER + instanceId + '_' + System.currentTimeMillis()
                + "_QueryResults_";
        int fileCount = 0;
        for (String query : queries) {
            String jobId = cloud.startBigDataQuery(query);
            queryResults
                    .add(QueryResult.create().setFilename(queryResultFilePrefix + fileCount).setJobId(jobId));
            fileCount++;
        }

        // invoke all the queries in parallel
        //this.invokeAll(queryTasks);

        long start = System.currentTimeMillis();

        String inferredTriplesFile = Utils.TEMP_FOLDER + instanceId + '_' + start + Constants.DOT_INF;

        // save all the query results in files in parallel
        //this.invokeAll(saveTasks);

        for (QueryResult queryResult : queryResults) {
            try {
                // block and wait for each job to complete then save results to a file
                QueryStats stats = cloud.saveBigQueryResultsToFile(queryResult.getJobId(),
                        queryResult.getFilename());
                queryResult.setStats(stats);

            } catch (IOException ioe) {
                // transient backend errors
                log.warn("failed to save query results to file, jobId: " + queryResult.getJobId(), ioe);
                //TODO should throw an exception
            }
        }

        try (PrintWriter writer = new PrintWriter(
                new GZIPOutputStream(new FileOutputStream(inferredTriplesFile), Constants.GZIP_BUF_SIZE))) {

            // now loop through the queries
            //for(Entry<Term, Set<Triple>> entry: schemaTerms.entrySet()) {
            for (QueryResult queryResult : queryResults) {

                //Term term = entry.getKey();
                QueryStats stats = queryResult.getStats();

                BigInteger rows = stats.getTotalRows();//term.getRows();

                this.totalBytes = this.totalBytes + stats.getTotalProcessedBytes();//term.getBytes();

                // only process if triples are found matching this term
                if (!BigInteger.ZERO.equals(rows)) {

                    stopwatch1.start();

                    int inferredTriplesCount = this.inferAndSaveTriplesToFile(queryResult, productiveTerms,
                            decoratedTable, writer);

                    interimInferredTriples += inferredTriplesCount;

                    this.totalRows = this.totalRows.add(rows);

                    stopwatch1.stop();

                } else {
                    log.info("Skipping query as no data is found");
                }
            }
        }

        totalInferredTriples += interimInferredTriples;

        if (interimInferredTriples > 0) {

            // stream smaller numbers of inferred triples
            // try uploading from cloud storage
            int streamingThreshold = Config.getIntegerProperty("ecarf.io.reasoning.streaming.threshold",
                    100000);

            log.info("Inserting " + interimInferredTriples + ", inferred triples into Big Data table for "
                    + productiveTerms.size() + " productive terms. Filename: " + inferredTriplesFile);

            if (interimInferredTriples <= streamingThreshold) {
                // stream the data

                Set<Triple> inferredTriples = TripleUtils.loadCompressedCSVTriples(inferredTriplesFile, true);
                log.info("Total triples to stream into Big Data: " + inferredTriples.size());
                cloud.streamObjectsIntoBigData(inferredTriples,
                        TableUtils.getBigQueryEncodedTripleTable(table));

                log.info("All inferred triples are streamed into Big Data table");

            } else {

                // load the data through cloud storage
                // upload the file to cloud storage
                log.info("Uploading inferred triples file into cloud storage: " + inferredTriplesFile);
                StorageObject file = cloud.uploadFileToCloudStorage(inferredTriplesFile, bucket);
                log.info("File " + file + ", uploaded successfully. Now loading it into big data.");

                String jobId = cloud.loadCloudStorageFilesIntoBigData(Lists.newArrayList(file.getUri()),
                        TableUtils.getBigQueryEncodedTripleTable(table), false);
                log.info(
                        "All inferred triples are loaded into Big Data table through cloud storage, completed jobId: "
                                + jobId);

            }

            // reset empty retries
            emptyRetries = 0;

            stopwatch2.reset();

        } else {
            log.info("No new inferred triples");
            // increment empty retries
            emptyRetries++;

            if (!stopwatch2.isRunning()) {
                stopwatch2.start();
            }
        }

        log.info("Total inferred triples so far = " + totalInferredTriples + ", current retry count: "
                + emptyRetries);

        if (emptyRetries < maxRetries) {
            ApiUtils.block(Config.getIntegerProperty(Constants.REASON_SLEEP_KEY, 20));

            // FIXME move into the particular cloud implementation service
            long elapsed = System.currentTimeMillis() - start;
            decoratedTable = "[" + table + "@-" + elapsed + "-]";

            log.info("Using table decorator: " + decoratedTable + ". Empty retries count: " + emptyRetries);
        }

    } while (emptyRetries < maxRetries); // end timestamp loop

    //executor.shutdown();
    log.info("Finished reasoning, total inferred triples = " + totalInferredTriples);
    //log.info("Number of avoided duplicate terms = " + this.duplicates);
    log.info("Total rows retrieved from big data = " + this.totalRows);
    log.info("Total processed GBytes = " + ((double) this.totalBytes / FileUtils.ONE_GB));
    log.info("Total process reasoning time (serialization in inf file) = " + stopwatch1);
    log.info("Total time spent in empty inference cycles = " + stopwatch2);
}
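
Note the null-safe comparison in the loop above: writing the constant first, BigInteger.ZERO.equals(rows), never throws even if the stats returned a null row count, whereas rows.equals(BigInteger.ZERO) would. A two-line sketch:

BigInteger rows = null;
boolean zero = BigInteger.ZERO.equals(rows); // false, and no NullPointerException
// rows == BigInteger.ZERO would compile but compares references, not values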

From source file:com.sdcs.courierbooking.service.UserServiceImpl.java

/**
 * Sends the delivery boy's details to the DAO to be saved in the database.
 *
 * @param sdcsDeliveryBoy the delivery boy to register
 * @return Json string for the newly created user
 */
@Override
public String registerDeliveryBoy(SdcsDeliveryBoy sdcsDeliveryBoy) {

    SdcsDeliveryBoy newDeliveryBoy = userDao.registerDeliveryBoy(sdcsDeliveryBoy);

    JSONObject registrationJsonObject = new JSONObject();

    if (newDeliveryBoy == null) {
        registrationJsonObject.put("result", false);
        registrationJsonObject.put("message",
                "The mobile number is already registered with SDCS. Please register with different mobile number.");
    } else {
        try {
            String strEmail = sdcsDeliveryBoy.getEmailAddress();
            String strname = sdcsDeliveryBoy.getFullName();

            String mailBody = "<b>Dear " + strname + ",</b>" + "<br>"
                    + "<b><i> Thank you for being a part of SDCS.</i></b>" + "<br>" + "<b> Your password is:"
                    + sdcsDeliveryBoy.getAccessKey() + "</b>" + "<br>" + "Thanks" + "<br>" + "SDCS Team" + "<br>"
                    + "info@sdcs.me";
            SdcsEmailComponent.sendMail(strEmail, "Welcome To SDCS", mailBody, BigInteger.ZERO);
        } catch (Exception e) {
            // mail failure is deliberately ignored; registration still succeeds
        }
        registrationJsonObject.put("result", true);
        registrationJsonObject.put("message", "Congratulations! registered successfully!");
        registrationJsonObject.put("user_id", newDeliveryBoy.getDeliveryBoyId());
        registrationJsonObject.put("full_name", newDeliveryBoy.getFullName());
        registrationJsonObject.put("email", newDeliveryBoy.getEmailAddress());
        registrationJsonObject.put("mobile", newDeliveryBoy.getMobileNumber());
        registrationJsonObject.put("driving_license_number", newDeliveryBoy.getDrivingLicenseNumber());
    }
    return registrationJsonObject.toString();
}

From source file:org.proteomecommons.tranche.cacheupdater.CacheUpdater.java

private void populateHashesSet() {
    log.println("Discovering known servers");

    // wait for bootup
    ServerUtil.waitForStartup();

    // add the servers
    for (ServerInfo server : ServerUtil.getServers()) {
        log.println("Adding server to list: " + server.getUrl());
        servers.add(server.getUrl());
    }

    // get the second level servers that haven't already been checked
    HashSet<String> secondaryServers = new HashSet<String>();
    for (String server : servers) {
        ServerInfo si = ServerUtil.getServerInfo(server);
        secondaryServers.addAll(si.getKnownServers());
    }

    // add all the secondary servers to the list of servers
    for (String url : secondaryServers) {
        if (!servers.contains(url)) {
            log.println("Adding secondary server to list: " + url);
            servers.add(url);
        }
    }

    log.println("Finished discovering servers.");

    Thread t = new Thread("Cache Updater Hash Finder") {

        public void run() {
            List<Thread> threadList = new ArrayList<Thread>();
            for (final String server : servers) {
                Thread s = new Thread("Cache Updater Hash Finder: " + server) {

                    public void run() {
                        try {
                            log.println("Connecting to " + server);
                            // bootstrap
                            ServerUtil.isServerOnline(server);
                            // connect to the server
                            TrancheServer ts = IOUtil.connect(server);
                            try {
                                // get all of the projects
                                BigInteger limit = BigInteger.valueOf(100);
                                BigInteger offset = BigInteger.ZERO;

                                // get the hashes
                                for (BigHash[] serverHashes = ts.getProjectHashes(offset,
                                        limit); serverHashes.length > 0; serverHashes = ts
                                                .getProjectHashes(offset, limit)) {
                                    // increment the offset
                                    offset = offset.add(BigInteger.valueOf(serverHashes.length));

                                    // add each hash
                                    for (BigHash hash : serverHashes) {
                                        synchronized (hashesOnNetwork) {
                                            hashesOnNetwork.add(hash);
                                        }
                                    }
                                }
                            } finally {
                                IOUtil.safeClose(ts);
                            }
                        } catch (Exception e) {
                            log.println("ERROR: Could not get project hashes from " + server);
                            err.println(server + ": " + e.getMessage());
                        }
                    }
                };
                s.start();
                threadList.add(s);
            }

            for (Thread t : threadList) {
                try {
                    t.join();
                } catch (Exception e) {
                    // interrupted while waiting for a worker; move on to the next one
                }
            }
        }
    };
    t.start();

    // move on after three minutes of looking
    try {
        t.join(3 * 60 * 1000);
    } catch (Exception e) {
        // timed out or interrupted; proceed with whatever hashes were found
    }

    log.println(hashesOnNetwork.size() + " project hashes discovered.");
}
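
The for-loop over getProjectHashes above is a paging idiom: start the offset at BigInteger.ZERO, fetch up to limit hashes, and advance the offset by however many came back until a page is empty. The same loop, unrolled into a while for readability (a sketch against the same TrancheServer API):

BigInteger limit = BigInteger.valueOf(100);
BigInteger offset = BigInteger.ZERO;
BigHash[] page = ts.getProjectHashes(offset, limit);
while (page.length > 0) {
    offset = offset.add(BigInteger.valueOf(page.length)); // advance past this page
    // ... collect the hashes ...
    page = ts.getProjectHashes(offset, limit);
}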

From source file:strat.mining.stratum.proxy.utils.mining.SHA256HashingUtils.java

/**
 * Computes and returns the real difficulty of this share.
 *
 * @param blockHeader the block header of the share
 * @return the real difficulty of this share
 */
public static Double getRealShareDifficulty(String blockHeader) {
    BigInteger realDifficulty = BigInteger.ZERO; // default; replaced by the quotient below
    BigInteger hash = getBlockHeaderHash(blockHeader);
    realDifficulty = DIFFICULTY_1_TARGET.toBigInteger().divide(hash);
    return realDifficulty.doubleValue();
}

From source file:io.ecarf.core.cloud.task.processor.reason.phase2.DoReasonTask6.java

@Override
public void run() throws IOException {

    GoogleCloudService cloud = (GoogleCloudService) this.getCloudService();

    //String table = metadata.getValue(EcarfMetaData.ECARF_TABLE);
    //Set<String> terms = metadata.getTerms();
    //String schemaFile = metadata.getValue(EcarfMetaData.ECARF_SCHEMA);
    //String bucket = metadata.getBucket();
    Stopwatch stopwatch1 = Stopwatch.createUnstarted();
    Stopwatch stopwatch2 = Stopwatch.createUnstarted();
    Set<String> termsSet;

    if (terms == null) {
        // too large, probably saved as a file
        //String termsFile = metadata.getValue(EcarfMetaData.ECARF_TERMS_FILE);
        log.info("Using json file for terms: " + termsFile);
        Validate.notNull(termsFile);

        String localTermsFile = Utils.TEMP_FOLDER + termsFile;
        cloud.downloadObjectFromCloudStorage(termsFile, localTermsFile, bucket);

        // convert from JSON
        termsSet = io.cloudex.framework.utils.FileUtils.jsonFileToSet(localTermsFile);

    } else {
        termsSet = ObjectUtils.csvToSet(terms);
    }

    String localSchemaFile = Utils.TEMP_FOLDER + schemaFile;
    // download the file from the cloud storage
    cloud.downloadObjectFromCloudStorage(schemaFile, localSchemaFile, bucket);

    // uncompress if compressed
    if (GzipUtils.isCompressedFilename(schemaFile)) {
        localSchemaFile = GzipUtils.getUncompressedFilename(localSchemaFile);
    }

    Map<String, Set<Triple>> allSchemaTriples = TripleUtils.getRelevantSchemaNTriples(localSchemaFile,
            TermUtils.RDFS_TBOX);

    // get all the triples we care about
    schemaTerms = new HashMap<>();

    for (String term : termsSet) {
        if (allSchemaTriples.containsKey(term)) {
            schemaTerms.put(term, allSchemaTriples.get(term));
        }
    }

    String decoratedTable = table;
    int emptyRetries = 0;
    int totalInferredTriples = 0;
    int maxRetries = Config.getIntegerProperty(Constants.REASON_RETRY_KEY, 6);
    String instanceId = cloud.getInstanceId();

    QueryGenerator<String> generator = new QueryGenerator<String>(schemaTerms, null);

    // timestamp loop
    do {

        Set<String> productiveTerms = new HashSet<>();
        int interimInferredTriples = 0;

        // First of all run all the queries asynchronously and remember the jobId and filename for each term

        List<QueryResult> queryResults = new ArrayList<QueryResult>();
        generator.setDecoratedTable(decoratedTable);

        List<String> queries = generator.getQueries();
        log.debug("Generated Queries: " + queries);
        String queryResultFilePrefix = Utils.TEMP_FOLDER + instanceId + '_' + System.currentTimeMillis()
                + "_QueryResults_";
        int fileCount = 0;
        for (String query : queries) {
            String jobId = cloud.startBigDataQuery(query);
            queryResults
                    .add(QueryResult.create().setFilename(queryResultFilePrefix + fileCount).setJobId(jobId));
            fileCount++;
        }

        // invoke all the queries in parallel
        //this.invokeAll(queryTasks);

        long start = System.currentTimeMillis();

        String inferredTriplesFile = Utils.TEMP_FOLDER + instanceId + '_' + start + Constants.DOT_INF;

        // save all the query results in files in parallel
        //this.invokeAll(saveTasks);

        for (QueryResult queryResult : queryResults) {
            try {
                // block and wait for each job to complete then save results to a file
                QueryStats stats = cloud.saveBigQueryResultsToFile(queryResult.getJobId(),
                        queryResult.getFilename());
                queryResult.setStats(stats);

            } catch (IOException ioe) {
                // transient backend errors
                log.warn("failed to save query results to file, jobId: " + queryResult.getJobId(), ioe);
                //TODO should throw an exception
            }
        }

        try (PrintWriter writer = new PrintWriter(
                new GZIPOutputStream(new FileOutputStream(inferredTriplesFile), Constants.GZIP_BUF_SIZE))) {

            // now loop through the queries
            //for(Entry<Term, Set<Triple>> entry: schemaTerms.entrySet()) {
            for (QueryResult queryResult : queryResults) {

                //Term term = entry.getKey();
                QueryStats stats = queryResult.getStats();

                BigInteger rows = stats.getTotalRows();//term.getRows();

                this.totalBytes = this.totalBytes + stats.getTotalProcessedBytes();//term.getBytes();

                // only process if triples are found matching this term
                if (!BigInteger.ZERO.equals(rows)) {

                    stopwatch1.start();

                    int inferredTriplesCount = this.inferAndSaveTriplesToFile(queryResult, productiveTerms,
                            decoratedTable, writer);

                    interimInferredTriples += inferredTriplesCount;

                    this.totalRows = this.totalRows.add(rows);

                    stopwatch1.stop();

                } else {
                    log.info("Skipping query as no data is found");
                }
            }
        }

        totalInferredTriples += interimInferredTriples;

        if (interimInferredTriples > 0) {

            // stream smaller numbers of inferred triples
            // try uploading from cloud storage
            int streamingThreshold = Config.getIntegerProperty("ecarf.io.reasoning.streaming.threshold",
                    100000);

            log.info("Inserting " + interimInferredTriples + ", inferred triples into Big Data table for "
                    + productiveTerms.size() + " productive terms. Filename: " + inferredTriplesFile);

            if (interimInferredTriples <= streamingThreshold) {
                // stream the data

                Set<Triple> inferredTriples = TripleUtils.loadCompressedCSVTriples(inferredTriplesFile, false);
                log.info("Total triples to stream into Big Data: " + inferredTriples.size());
                cloud.streamObjectsIntoBigData(inferredTriples, TableUtils.getBigQueryTripleTable(table));

                log.info("All inferred triples are streamed into Big Data table");

            } else {

                // load the data through cloud storage
                // upload the file to cloud storage
                log.info("Uploading inferred triples file into cloud storage: " + inferredTriplesFile);
                StorageObject file = cloud.uploadFileToCloudStorage(inferredTriplesFile, bucket);
                log.info("File " + file + ", uploaded successfully. Now loading it into big data.");

                String jobId = cloud.loadCloudStorageFilesIntoBigData(Lists.newArrayList(file.getUri()),
                        TableUtils.getBigQueryTripleTable(table), false);
                log.info(
                        "All inferred triples are loaded into Big Data table through cloud storage, completed jobId: "
                                + jobId);

            }

            // reset empty retries
            emptyRetries = 0;

            stopwatch2.reset();

        } else {
            log.info("No new inferred triples");
            // increment empty retries
            emptyRetries++;

            if (!stopwatch2.isRunning()) {
                stopwatch2.start();
            }
        }

        log.info("Total inferred triples so far = " + totalInferredTriples + ", current retry count: "
                + emptyRetries);

        if (emptyRetries < maxRetries) {
            ApiUtils.block(Config.getIntegerProperty(Constants.REASON_SLEEP_KEY, 20));

            // FIXME move into the particular cloud implementation service
            long elapsed = System.currentTimeMillis() - start;
            decoratedTable = "[" + table + "@-" + elapsed + "-]";

            log.info("Using table decorator: " + decoratedTable + ". Empty retries count: " + emptyRetries);
        }

    } while (emptyRetries < maxRetries); // end timestamp loop

    //executor.shutdown();
    log.info("Finished reasoning, total inferred triples = " + totalInferredTriples);
    log.info("Number of avoided duplicate terms = " + this.duplicates);
    log.info("Total rows retrieved from big data = " + this.totalRows);
    log.info("Total processed GBytes = " + ((double) this.totalBytes / FileUtils.ONE_GB));
    log.info("Total process reasoning time (serialization in inf file) = " + stopwatch1);
    log.info("Total time spent in empty inference cycles = " + stopwatch2);
}

From source file:org.openestate.io.trovit.TrovitUtils.java

public static IntBool parseIntBool(String value) {
    value = StringUtils.trimToNull(value);

    if (value == null)
        return null;
    else if ("0".equalsIgnoreCase(value))
        return new IntBool(BigInteger.ZERO);
    else if ("1".equalsIgnoreCase(value))
        return new IntBool(BigInteger.ONE);

    try {
        Boolean boolValue = parseBool(value);
        if (boolValue != null)
            return new IntBool(boolValue);
    } catch (Exception ex) {
        // not a boolean; fall through and try an integer
    }

    try {
        BigInteger intValue = DatatypeConverter.parseInteger(value);
        if (intValue != null)
            return new IntBool(intValue);
    } catch (Exception ex) {
        // not an integer either; fail with the exception below
    }

    throw new IllegalArgumentException("Can't parse int-bool value '" + value + "'!");
}
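
A few illustrative calls, assuming parseBool accepts the usual true/false spellings:

parseIntBool("0");     // IntBool wrapping BigInteger.ZERO
parseIntBool("1");     // IntBool wrapping BigInteger.ONE
parseIntBool("true");  // IntBool wrapping Boolean.TRUE
parseIntBool("42");    // IntBool wrapping BigInteger 42
parseIntBool(null);    // null (blank input is trimmed to null)
parseIntBool("maybe"); // throws IllegalArgumentException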

From source file:com.eventsourcing.postgresql.PostgreSQLJournalTest.java

@Test
@SneakyThrows
public void serializationNull() {
    HybridTimestamp timestamp = new HybridTimestamp(timeProvider);
    timestamp.update();

    Journal.Transaction tx = journal.beginTransaction();
    SerializationEvent event = SerializationEvent.builder().test(TestClass.builder().build()).build();
    event = (SerializationEvent) journal.journal(tx, event);
    tx.rollback();

    TestClass test = event.getTest();

    assertEquals(test.pByte, 0);
    assertEquals(test.oByte, Byte.valueOf((byte) 0));

    assertEquals(test.pByteArr.length, 0);
    assertEquals(test.oByteArr.length, 0);

    assertEquals(test.pShort, 0);
    assertEquals(test.oShort, Short.valueOf((short) 0));

    assertEquals(test.pInt, 0);
    assertEquals(test.oInt, Integer.valueOf(0));

    assertEquals(test.pLong, 0);
    assertEquals(test.oLong, Long.valueOf(0));

    assertTrue(test.pFloat == 0.0);
    assertEquals(test.oFloat, Float.valueOf((float) 0.0));

    assertEquals(test.pDouble, 0.0);
    assertEquals(test.oDouble, Double.valueOf(0.0));

    assertEquals(test.pBoolean, false);
    assertEquals(test.oBoolean, Boolean.FALSE);

    assertEquals(test.str, "");

    assertEquals(test.uuid, new UUID(0, 0));

    assertEquals(test.e, TestClass.E.A);

    assertNotNull(test.value);
    assertEquals(test.value.value, "");

    assertNotNull(test.value1);
    assertTrue(test.value1.value().isEmpty());

    assertNotNull(test.list);
    assertEquals(test.list.size(), 0);

    assertNotNull(test.map);
    assertEquals(test.map.size(), 0);

    assertNotNull(test.optional);
    assertFalse(test.optional.isPresent());

    assertNotNull(test.bigDecimal);
    assertEquals(test.bigDecimal, BigDecimal.ZERO);

    assertNotNull(test.bigInteger);
    assertEquals(test.bigInteger, BigInteger.ZERO);

    assertNotNull(test.date);
    assertEquals(test.date, new Date(0));

}