Example usage for java.io IOException getClass

Introduction

This page collects usage examples for the getClass() method as invoked on java.io.IOException.

Prototype

@HotSpotIntrinsicCandidate
public final native Class<?> getClass();

Document

Returns the runtime class of this Object.
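
In the usage examples below, getClass() is typically called on a caught IOException to log the concrete exception type. A minimal sketch of that pattern (the class and file names here are illustrative, not taken from the examples):

import java.io.FileReader;
import java.io.IOException;

public class GetClassDemo {
    public static void main(String[] args) {
        try (FileReader reader = new FileReader("missing.txt")) {
            reader.read();
        } catch (IOException e) {
            // getClass() returns the runtime class, e.g. java.io.FileNotFoundException,
            // even though the static type of 'e' is IOException.
            System.err.println(e.getClass().getName() + ": " + e.getMessage());
        }
    }
}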

Usage

From source file: org.paxle.crawler.ftp.impl.FtpCrawler.java

public ICrawlerDocument request(URI requestUri) {
    if (requestUri == null)
        throw new NullPointerException("URL was null");
    this.logger.info(String.format("Crawling URL '%s' ...", requestUri));

    ICrawlerDocument crawlerDoc = null;
    try {
        final ICrawlerContext ctx = this.contextLocal.getCurrentContext();

        // creating a crawler-doc and set some basic properties
        crawlerDoc = ctx.createDocument();
        crawlerDoc.setCrawlerDate(new Date());
        crawlerDoc.setLocation(requestUri);

        FtpUrlConnection ftpConnection = new FtpUrlConnection(requestUri.toURL());
        if (this.connectionTimeout >= 0)
            ftpConnection.setConnectTimeout(this.connectionTimeout);
        if (this.socketTimeout >= 0)
            ftpConnection.setReadTimeout(this.socketTimeout);

        // connect to host
        ftpConnection.connect();

        // get the modification date of the file
        long modTimeStamp = ftpConnection.getLastModified();
        if (modTimeStamp != 0) {
            crawlerDoc.setLastModDate(new Date(modTimeStamp));
        }

        // getting content-type if available
        String contentType = ftpConnection.getContentType();
        if (contentType != null) {
            crawlerDoc.setMimeType(contentType);
        }

        // checking download size limit
        if (this.maxDownloadSize > 0) {
            int contentLength = ftpConnection.getContentLength();
            if (contentLength > this.maxDownloadSize) {
                // reject the document
                final String msg = String.format(
                        "Content-length '%d' of resource '%s' is larger than the max. allowed size of '%d' bytes.",
                        Integer.valueOf(contentLength), requestUri, Integer.valueOf(this.maxDownloadSize));

                this.logger.warn(msg);
                crawlerDoc.setStatus(ICrawlerDocument.Status.UNKNOWN_FAILURE, msg);
                return crawlerDoc;
            }
        }

        final ICrawlerTools crawlerTools = ctx.getCrawlerTools();
        if (ftpConnection.isDirectory()) {
            final FTPFile[] list = ftpConnection.listFiles();
            final Iterator<DirlistEntry> dirlistIt = new DirlistIterator(list);

            // generate & save dir-listing into file
            crawlerTools.saveListing(crawlerDoc, dirlistIt, true, list.length > 50);
        } else {
            // get input stream
            InputStream input = ftpConnection.getInputStream();

            // copy data into file
            crawlerTools.saveInto(crawlerDoc, input);

            // close connection
            input.close();
        }

        // finished
        crawlerDoc.setStatus(ICrawlerDocument.Status.OK);
    } catch (IOException e) {
        if (e instanceof FtpConnectionException) {
            crawlerDoc.setStatus(ICrawlerDocument.Status.UNKNOWN_FAILURE, e.getMessage());
        } else {
            crawlerDoc.setStatus(ICrawlerDocument.Status.UNKNOWN_FAILURE,
                    "Unexpected Exception: " + e.getMessage());
        }

        this.logger.warn(String.format("Unexpected '%s' while trying to crawl resource '%s'.",
                e.getClass().getName(), requestUri), e);
    } catch (URISyntaxException e) {
        this.logger.warn(
                String.format("Unexpected URI syntax exception while converting URL->URI: %s", e.getMessage()));
    }

    return crawlerDoc;
}
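
The catch block above separates FtpConnectionException from other IOExceptions with an instanceof test, which only works because FtpConnectionException must be a subclass of IOException. A sketch of the same handling written as ordered catch clauses, assuming that inheritance relationship (the more specific type has to come first; logging omitted for brevity):

try {
    // ... crawl as above ...
} catch (FtpConnectionException e) {
    crawlerDoc.setStatus(ICrawlerDocument.Status.UNKNOWN_FAILURE, e.getMessage());
} catch (IOException e) {
    crawlerDoc.setStatus(ICrawlerDocument.Status.UNKNOWN_FAILURE,
            "Unexpected Exception: " + e.getMessage());
}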

From source file: com.searchcode.app.jobs.repository.IndexBaseRepoJob.java

/**
 * Writes the supplied report rows as a formatted CSV file to the logs directory.
 */
private void logIndexed(String repoName, List<String[]> reportList) {
    try {
        CSVWriter writer = new CSVWriter(
                new FileWriter(Singleton.getHelpers().getLogPath() + repoName + ".csv.tmp"));
        writer.writeAll(reportList);
        writer.flush();
        writer.close();

        Path source = Paths.get(Singleton.getHelpers().getLogPath() + repoName + ".csv.tmp");
        Files.move(source, source.resolveSibling(repoName + ".csv"), StandardCopyOption.REPLACE_EXISTING);
    } catch (IOException ex) {
        Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                + " logIndexed for " + repoName + "\n with message: " + ex.getMessage());
    }
}
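
The method writes to a .csv.tmp file and then renames it, so readers never observe a half-written CSV. A sketch of the same pattern using try-with-resources, which also closes the writer if writeAll throws; writeReport is a hypothetical helper, and the opencsv import path depends on the version in use:

import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.List;
// import com.opencsv.CSVWriter; // package name depends on the opencsv version

static void writeReport(Path logDir, String repoName, List<String[]> rows) throws IOException {
    Path tmp = logDir.resolve(repoName + ".csv.tmp");
    // try-with-resources closes (and therefore flushes) the writer even on failure
    try (CSVWriter writer = new CSVWriter(new FileWriter(tmp.toFile()))) {
        writer.writeAll(rows);
    }
    // publish the finished file in a single step
    Files.move(tmp, tmp.resolveSibling(repoName + ".csv"), StandardCopyOption.REPLACE_EXISTING);
}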

From source file: org.apache.tez.runtime.library.common.shuffle.HttpConnection.java

/**
 * Connect to the source with the given timeout.
 *
 * @param connectionTimeout total connection timeout in milliseconds
 * @return true if the connection was successful,
 *         false if the connection was previously cleaned up
 * @throws IOException upon connection failure
 */
public boolean connect(int connectionTimeout) throws IOException {
    stopWatch.reset().start();
    if (connection == null) {
        setupConnection();
    }
    int unit = 0;
    if (connectionTimeout < 0) {
        throw new IOException("Invalid timeout " + "[timeout = " + connectionTimeout + " ms]");
    } else if (connectionTimeout > 0) {
        unit = Math.min(UNIT_CONNECT_TIMEOUT, connectionTimeout);
    }
    // set the connect timeout to the unit-connect-timeout
    connection.setConnectTimeout(unit);
    int connectionFailures = 0;
    while (true) {
        long connectStartTime = System.currentTimeMillis();
        try {
            connection.connect();
            connectionSucceeed = true;
            break;
        } catch (IOException ioe) {
            // Don't attempt another connect if already cleaned up.
            connectionFailures++;
            if (cleanup) {
                LOG.info("Cleanup is set to true. Not attempting to" + " connect again. Last exception was: ["
                        + ioe.getClass().getName() + ", " + ioe.getMessage() + "]");
                return false;
            }
            // update the total remaining connect-timeout
            connectionTimeout -= unit;
            // throw an exception if we have waited for the timeout amount of time
            // note that the updated value of timeout is used here
            if (connectionTimeout <= 0) {
                throw new IOException(
                        "Failed to connect to " + url + ", #connectionFailures=" + connectionFailures, ioe);
            }
            long elapsed = System.currentTimeMillis() - connectStartTime;
            if (elapsed < unit) {
                try {
                    long sleepTime = unit - elapsed;
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Sleeping for " + sleepTime + " while establishing connection to " + url
                                + ", since connectAttempt returned in " + elapsed + " ms");
                    }
                    Thread.sleep(sleepTime);
                } catch (InterruptedException e) {
                    throw new IOException("Connection establishment sleep interrupted, #connectionFailures="
                            + connectionFailures, e);
                }
            }

            // reset the connect timeout for the last try
            if (connectionTimeout < unit) {
                unit = connectionTimeout;
                // reset the connect time out for the final connect
                connection.setConnectTimeout(unit);
            }

        }
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Time taken to connect to " + url.toString() + " "
                + stopWatch.elapsedTime(TimeUnit.MILLISECONDS) + " ms; connectionFailures="
                + connectionFailures);
    }
    return true;
}
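
The loop above is a retry-with-budget pattern: no single attempt waits longer than UNIT_CONNECT_TIMEOUT, each failure is charged against the total timeout, and the final attempt gets whatever budget remains. A stripped-down sketch of the pattern against a plain HttpURLConnection (names are illustrative, and the sleep-between-attempts refinement is omitted):

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

static void connectWithBudget(URL url, int budgetMs, int unitMs) throws IOException {
    int unit = Math.min(unitMs, budgetMs); // no single attempt waits longer than one unit
    while (true) {
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setConnectTimeout(unit);
        try {
            conn.connect();
            conn.disconnect();
            return; // connected within the remaining budget
        } catch (IOException ioe) {
            budgetMs -= unit; // charge this attempt against the total budget
            if (budgetMs <= 0) {
                throw new IOException("Failed to connect to " + url, ioe);
            }
            unit = Math.min(unit, budgetMs); // the final attempt gets the leftover budget
        }
    }
}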

From source file: com.clustercontrol.notify.util.SendSyslog.java

@Override
public void notify(NotifyRequestMessage requestMessage) throws NotifyNotFound {
    if (m_log.isDebugEnabled()) {
        m_log.debug("notify() " + requestMessage);
    }
    OutputBasicInfo outputInfo = requestMessage.getOutputInfo();
    String notifyId = requestMessage.getNotifyId();

    if (m_log.isDebugEnabled()) {
        m_log.debug("sendlog() " + outputInfo);
    }
    // Get the log escalation notification settings for this notify ID
    NotifyLogEscalateInfo logEscalateInfo = QueryUtil.getNotifyLogEscalateInfoPK(notifyId);

    NotifyLogEscalateInfoData escalateInfoData = new NotifyLogEscalateInfoData();
    escalateInfoData.setNotifyId(logEscalateInfo.getNotifyId());

    switch (outputInfo.getPriority()) {
    case PriorityConstant.TYPE_INFO:
        escalateInfoData.setValidFlg(logEscalateInfo.getInfoValidFlg());
        escalateInfoData.setEscalateMessage(logEscalateInfo.getInfoEscalateMessage());
        escalateInfoData.setSyslogPriority(logEscalateInfo.getInfoSyslogPriority());
        escalateInfoData.setSyslogFacility(logEscalateInfo.getInfoSyslogFacility());
        break;
    case PriorityConstant.TYPE_WARNING:
        escalateInfoData.setValidFlg(logEscalateInfo.getWarnValidFlg());
        escalateInfoData.setEscalateMessage(logEscalateInfo.getWarnEscalateMessage());
        escalateInfoData.setSyslogPriority(logEscalateInfo.getWarnSyslogPriority());
        escalateInfoData.setSyslogFacility(logEscalateInfo.getWarnSyslogFacility());
        break;
    case PriorityConstant.TYPE_CRITICAL:
        escalateInfoData.setValidFlg(logEscalateInfo.getCriticalValidFlg());
        escalateInfoData.setEscalateMessage(logEscalateInfo.getCriticalEscalateMessage());
        escalateInfoData.setSyslogPriority(logEscalateInfo.getCriticalSyslogPriority());
        escalateInfoData.setSyslogFacility(logEscalateInfo.getCriticalSyslogFacility());
        break;
    case PriorityConstant.TYPE_UNKNOWN:
        escalateInfoData.setValidFlg(logEscalateInfo.getUnknownValidFlg());
        escalateInfoData.setEscalateMessage(logEscalateInfo.getUnknownEscalateMessage());
        escalateInfoData.setSyslogPriority(logEscalateInfo.getUnknownSyslogPriority());
        escalateInfoData.setSyslogFacility(logEscalateInfo.getUnknownSyslogFacility());
        break;
    default:
        break;
    }

    escalateInfoData.setEscalateFacilityFlg(logEscalateInfo.getEscalateFacilityFlg());
    escalateInfoData.setEscalateFacility(logEscalateInfo.getEscalateFacility());
    escalateInfoData.setEscalatePort(logEscalateInfo.getEscalatePort());
    escalateInfoData.setOwnerRoleId(logEscalateInfo.getNotifyInfoEntity().getOwnerRoleId());

    // Build the syslog message text
    String message = getMessage(outputInfo, logEscalateInfo);

    // Determine the target IP addresses for the notification
    List<InetAddress> ipAddresses = getIpAddresses(outputInfo, escalateInfoData);
    if (ipAddresses == null) {
        String detailMsg = "IP Address is empty.";
        m_log.info(detailMsg);
        internalErrorNotify(PriorityConstant.TYPE_CRITICAL, notifyId, MessageConstant.MESSAGE_SYS_007_NOTIFY,
                detailMsg);
        // return here: the loop below would otherwise throw a NullPointerException
        return;
    }

    // Format the TIMESTAMP field for the syslog header
    SimpleDateFormat sdf = new SimpleDateFormat(HEADER_DATE_FORMAT, Locale.US);
    sdf.setTimeZone(HinemosTime.getTimeZone());
    String headerTimeStamp = sdf.format(HinemosTime.getDateInstance());
    if (m_log.isDebugEnabled()) {
        m_log.debug("sendlog() target message. notifyId = " + escalateInfoData.getNotifyId()
                + ", headerTimeStamp = " + headerTimeStamp + ", facilityId = " + outputInfo.getFacilityId()
                + ", message = " + message);
    }

    for (InetAddress address : ipAddresses) {
        try {
            sendMsgWithRetry(address, escalateInfoData.getEscalatePort(),
                    escalateInfoData.getSyslogPriority() + escalateInfoData.getSyslogFacility(),
                    headerTimeStamp, getSyslogHeaderHost(outputInfo.getFacilityId()), message);
        } catch (IOException e) {
            String detailMsg = e.getMessage() + " IP Address = " + address;
            m_log.info("sendlog() " + detailMsg + " : " + e.getClass().getSimpleName() + ", " + e.getMessage());
            internalErrorNotify(PriorityConstant.TYPE_CRITICAL, notifyId,
                    MessageConstant.MESSAGE_SYS_007_NOTIFY, detailMsg);
            // continue with the remaining IP addresses
        }
    }
}

From source file: de.tudarmstadt.lt.lm.app.Ngrams.java

@Override
public void run() {
    _num_ngrams = 0L;
    _ngram = new FixedSizeFifoLinkedList<>(_order_to);

    _pout = System.out;
    if (!"-".equals(_out.trim())) {
        try {
            if (_out.endsWith(".gz"))
                _pout = new PrintStream(new GZIPOutputStream(new FileOutputStream(new File(_out))));
            else
                _pout = new PrintStream(new FileOutputStream(new File(_out), true));
        } catch (IOException e) {
            LOG.error("Could not open ouput file '{}' for writing.", _out, e);
            System.exit(1);
        }
    }

    try {
        if (_prvdr == null) {
            _prvdr = StartLM.getStringProviderInstance(_provider_type);
            _prvdr.setLanguageModel(new DummyLM<>(_order_to));
        }
    } catch (Exception e) {
        LOG.error("Could not initialize Ngram generator. {}: {}", e.getClass(), e.getMessage(), e);
    }

    if ("-".equals(_file.trim())) {
        LOG.info("Processing text from stdin ('{}').", _file);
        try {
            run(new InputStreamReader(System.in, "UTF-8"), _file);
        } catch (Exception e) {
            LOG.error("Could not generate ngram from from file '{}'.", _file, e);
        }
    } else {

        File f_or_d = new File(_file);
        if (!f_or_d.exists())
            throw new Error(String.format("File or directory '%s' not found.", _file));

        if (f_or_d.isFile()) {
            LOG.info("Processing file '{}'.", f_or_d.getAbsolutePath());
            try {
                run(new InputStreamReader(new FileInputStream(f_or_d), "UTF-8"), _file);
            } catch (Exception e) {
                LOG.error("Could not generate ngrams from file '{}'.", f_or_d.getAbsolutePath(), e);
            }
        }

        if (f_or_d.isDirectory()) {
            File[] txt_files = f_or_d.listFiles(new FileFilter() {
                @Override
                public boolean accept(File f) {
                    return f.isFile() && f.getName().endsWith(".txt");
                }
            });

            for (int i = 0; i < txt_files.length; i++) {
                File f = txt_files[i];
                LOG.info("Processing file '{}' ({}/{}).", f.getAbsolutePath(), i + 1, txt_files.length);
                try {
                    run(new InputStreamReader(new FileInputStream(f), "UTF-8"), f.getAbsolutePath());
                } catch (Exception e) {
                    LOG.error("Could not generate ngrams from file '{}'.", f.getAbsolutePath(), e);
                }
            }
        }
    }
    LOG.info("Generated {} ngrams.", _num_ngrams);
    if (!"-".equals(_out.trim()))
        _pout.close();

}
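
The output selection above follows a common CLI convention: "-" means stdout, a .gz suffix means gzip-compressed output, and anything else is a plain file opened in append mode. A sketch of that dispatch as a helper (openOutput is hypothetical):

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.zip.GZIPOutputStream;

static PrintStream openOutput(String out) throws IOException {
    if ("-".equals(out.trim())) {
        return System.out;                                    // "-" means stdout
    }
    if (out.endsWith(".gz")) {
        // compress on the fly; note this truncates an existing file
        return new PrintStream(new GZIPOutputStream(new FileOutputStream(out)));
    }
    return new PrintStream(new FileOutputStream(out, true));  // plain text, append mode
}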

From source file: org.opencb.opencga.storage.hadoop.variant.VariantHbaseDBAdaptor.java

@Override
public QueryResult<Variant> getAllVariantsByRegionAndStudy(Region region, String sourceId,
        QueryOptions options) {
    Long start, end, dbstart, dbend;
    start = System.currentTimeMillis();
    QueryResult<Variant> queryResult = new QueryResult<>(
            String.format("%s:%d-%d", region.getChromosome(), region.getStart(), region.getEnd()));
    List<Variant> results = new LinkedList<>();

    boolean includeSamples;
    boolean includeStats;
    boolean includeEffects;
    if (!options.containsKey("samples") && !options.containsKey("stats") && !options.containsKey("effects")) {
        includeSamples = true;
        includeStats = true;
        includeEffects = true;
    } else {
        includeSamples = options.containsKey("samples") && options.getBoolean("samples");
        includeStats = options.containsKey("stats") && options.getBoolean("stats");
        includeEffects = options.containsKey("effects") && options.getBoolean("effects");
    }

    try {
        String startRow = buildRowkey(region.getChromosome(), Long.toString(region.getStart()));
        String stopRow = buildRowkey(region.getChromosome(), Long.toString(region.getEnd()));
        HTable table = new HTable(admin.getConfiguration(), tableName);
        dbstart = System.currentTimeMillis();
        Scan regionScan = new Scan(startRow.getBytes(), stopRow.getBytes());
        ResultScanner scanres = table.getScanner(regionScan);
        dbend = System.currentTimeMillis();
        queryResult.setDbTime(dbend - dbstart);

        // Iterate over results and, optionally, their samples and statistics
        for (Result result : scanres) {
            String[] rowkeyParts = new String(result.getRow(), CHARSET_UTF_8).split("_");
            String chromosome = rowkeyParts[0].replaceFirst("^0+(?!$)", "");
            int position = Integer.parseInt(rowkeyParts[1]);

            // Get basic result fields from Protocol Buffers message
            NavigableMap<byte[], byte[]> infoMap = result.getFamilyMap("i".getBytes());
            byte[] byteInfo = infoMap.get((sourceId + "_data").getBytes());
            VariantFieldsProtos.VariantInfo protoInfo = VariantFieldsProtos.VariantInfo.parseFrom(byteInfo);
            String reference = protoInfo.getReference();
            String alternate = StringUtils.join(protoInfo.getAlternateList(), ",");
            String format = StringUtils.join(protoInfo.getFormatList(), ":");
            Variant variant = new Variant(chromosome, position, position, reference, alternate);

            // Set samples if requested
            if (includeSamples) {
                NavigableMap<byte[], byte[]> sampleMap = result.getFamilyMap("d".getBytes());
                Map<String, Map<String, String>> resultSampleMap = new HashMap<>();

                // Set samples
                for (byte[] s : sampleMap.keySet()) {
                    String sampleName = (new String(s, CHARSET_UTF_8)).replaceAll(sourceId + "_", "");
                    VariantFieldsProtos.VariantSample sample = VariantFieldsProtos.VariantSample
                            .parseFrom(sampleMap.get(s));
                    String sample1 = sample.getSample();
                    String[] values = sample1.split(":");
                    String[] fields = format.split(":");
                    Map<String, String> singleSampleMap = new HashMap<>();
                    for (int i = 0; i < fields.length; i++) {
                        singleSampleMap.put(fields[i], values[i]);
                    }
                    // TODO 
                    //                        variant.addSampleData(sampleName, singleSampleMap);
                }
            }

            // Set stats if requested
            if (includeStats) {
                byte[] byteStats = infoMap.get((sourceId + "_stats").getBytes());
                VariantFieldsProtos.VariantStats protoStats = VariantFieldsProtos.VariantStats
                        .parseFrom(byteStats);
                VariantStats variantStats = new VariantStats(chromosome, position, reference, alternate,
                        protoStats.getMaf(), protoStats.getMgf(), protoStats.getMafAllele(),
                        protoStats.getMgfGenotype(), protoStats.getMissingAlleles(),
                        protoStats.getMissingGenotypes(), protoStats.getMendelianErrors(),
                        protoStats.getIsIndel(), protoStats.getCasesPercentDominant(),
                        protoStats.getControlsPercentDominant(), protoStats.getCasesPercentRecessive(),
                        protoStats.getControlsPercentRecessive());
                variant.setStats(variantStats);
            }

            if (includeEffects) {
                QueryResult<VariantEffect> queryEffects = getEffectsByVariant(variant, options);
                variant.setEffect(queryEffects.getResult());
            }

            results.add(variant);
        }
    } catch (IOException e) {
        System.err.println(e.getClass().getName() + ": " + e.getMessage());
    }
    queryResult.setResult(results);
    queryResult.setNumResults(results.size());
    end = System.currentTimeMillis();
    queryResult.setTime(end - start);
    return queryResult;
}

From source file: com.searchcode.app.jobs.repository.IndexBaseRepoJob.java

/**
 * Indexes all the documents in the path provided, and removes anything from the index that is not on disk.
 * Generally this is a slow update used only for the initial clone of a repository.
 * NB: this can be used for updates, but it will be much slower as it needs to walk the contents of the disk.
 */
public void indexDocsByPath(Path path, String repoName, String repoLocations, String repoRemoteLocation,
        boolean existingRepo) {

    String fileRepoLocations = FilenameUtils.separatorsToUnix(repoLocations);
    SearchcodeFileVisitor<Path> searchcodeFileVisitor = new SearchcodeFileVisitor<>(this, repoName,
            fileRepoLocations, repoRemoteLocation);

    try {
        if (this.FOLLOWLINKS) {
            Files.walkFileTree(path, EnumSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE,
                    searchcodeFileVisitor);
        } else {
            Files.walkFileTree(path, searchcodeFileVisitor);
        }

    } catch (IOException ex) {
        Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                + " indexDocsByPath walkFileTree\n with message: " + ex.getMessage());
    }

    if (this.LOGINDEXED) {
        logIndexed(repoName, searchcodeFileVisitor.reportList);
    }

    if (existingRepo) {
        CodeSearcher codeSearcher = new CodeSearcher();
        this.cleanMissingPathFiles(codeSearcher, repoName, searchcodeFileVisitor.fileLocationsMap);
    }
}
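
Files.walkFileTree ignores symbolic links unless FileVisitOption.FOLLOW_LINKS is passed explicitly, which is why the two branches above differ. A minimal sketch of a visitor driven the same way (this stands in for the SearchcodeFileVisitor, which is not shown here):

import java.io.IOException;
import java.nio.file.FileVisitOption;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.EnumSet;

static void walk(Path root, boolean followLinks) throws IOException {
    SimpleFileVisitor<Path> visitor = new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
            System.out.println(file); // an indexer would process the file here
            return FileVisitResult.CONTINUE;
        }
    };
    if (followLinks) {
        // FOLLOW_LINKS must be passed explicitly; symlinks are not followed by default
        Files.walkFileTree(root, EnumSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE, visitor);
    } else {
        Files.walkFileTree(root, visitor);
    }
}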

From source file: com.searchcode.app.jobs.repository.IndexBaseRepoJob.java

/**
 * Checks whether a clone operation of a repository was successful. If not,
 * deletes the folder so that the clone can start again.
 */
public boolean checkCloneSuccess(String repoName, String repoLocations) {
    if (Singleton.getHelpers().isNullEmptyOrWhitespace(repoName)
            && Singleton.getHelpers().isNullEmptyOrWhitespace(repoLocations)) {
        Singleton.getLogger().warning(
                "Repository Location is set to nothing, this can cause searchcode to modify the root file system!");
        return false;
    }

    // Check if successfully cloned, and if not delete and restart
    boolean cloneSuccess = this.checkCloneUpdateSucess(repoLocations + repoName);
    if (!cloneSuccess) {
        // Delete the folder and delete from the index
        try {
            File filePath = new File(repoLocations + "/" + repoName + "/");
            if (!filePath.getAbsolutePath().equals("/")) { // Let's really be sure....
                FileUtils.deleteDirectory(filePath);
            }
            Singleton.getCodeIndexer().deleteByReponame(repoName);
        } catch (IOException ex) {
            Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                    + "\n with message: " + ex.getMessage());
        }
    }
    // TODO is this correct?!
    this.deleteCloneUpdateSuccess(repoLocations + "/" + repoName);

    return true;
}

From source file: net.modelbased.proasense.storage.fuseki.StorageRegistryFusekiService.java

@GET
@Path("/query/sensor/list")
@Produces(MediaType.APPLICATION_JSON)
public Response querySensorList(@QueryParam("dataset") String dataset) {
    String FUSEKI_SPARQL_ENDPOINT_URL = getFusekiSparqlEndpointUrl(dataset);

    String SPARQL_SENSOR_LIST = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n"
            + "PREFIX owl: <http://www.w3.org/2002/07/owl#>\n"
            + "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n"
            + "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n"
            + "PREFIX pssn: <http://www.sintef.no/pssn#>\n" + "\n" + "SELECT DISTINCT *\n" + "  WHERE {\n"
            + "    ?sensorId rdf:type <http://purl.oclc.org/NET/ssnx/ssn#Sensor>.\n" + "  }\n"
            + "ORDER BY ASC (?sensorId)";

    QueryExecution qe = QueryExecutionFactory.sparqlService(FUSEKI_SPARQL_ENDPOINT_URL, SPARQL_SENSOR_LIST);
    ResultSet results = qe.execSelect();

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ResultSetFormatter.outputAsJSON(baos, results);

    qe.close();

    String jsonResults = baos.toString();
    jsonResults = jsonResults.replaceAll("http://www.sintef.no/pssn#", "");

    JSONObject jsonResponse = new JSONObject();
    JSONArray sensorArray = new JSONArray();

    ObjectMapper mapper = new ObjectMapper();
    try {
        JsonNode rootNode = mapper.readTree(jsonResults);
        JsonNode resultsNode = rootNode.path("results");
        JsonNode bindingsNode = resultsNode.path("bindings");
        Iterator<JsonNode> iterator = bindingsNode.getElements();
        while (iterator.hasNext()) {
            JsonNode xNode = iterator.next();
            List<String> valueNode = xNode.findValuesAsText("value");

            sensorArray.put(valueNode.get(0));
        }
    } catch (IOException e) {
        System.out.println(e.getClass().getName() + ": " + e.getMessage());
    }

    jsonResponse.put("sensor", sensorArray);

    String result = jsonResponse.toString(2);

    // Return HTTP response 200 in case of success
    return Response.status(200).entity(result).build();
}

From source file: net.modelbased.proasense.storage.fuseki.StorageRegistryFusekiService.java

@GET
@Path("/query/mould/list")
@Produces(MediaType.APPLICATION_JSON)
public Response queryMouldList(@QueryParam("dataset") String dataset) {
    String FUSEKI_SPARQL_ENDPOINT_URL = getFusekiSparqlEndpointUrl(dataset);

    String SPARQL_MOULD_LIST = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n"
            + "PREFIX owl: <http://www.w3.org/2002/07/owl#>\n"
            + "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n"
            + "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n"
            + "PREFIX ssn: <http://purl.oclc.org/NET/ssnx/ssn#>\n"
            + "PREFIX pssn: <http://www.sintef.no/pssn#>\n" + "\n" + "SELECT DISTINCT *\n" + "  WHERE {\n"
            + "    ?subject rdf:type pssn:Mould .\n" + "  }\n" + "ORDER BY ASC (?mouldId)";

    QueryExecution qe = QueryExecutionFactory.sparqlService(FUSEKI_SPARQL_ENDPOINT_URL, SPARQL_MOULD_LIST);
    ResultSet results = qe.execSelect();

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ResultSetFormatter.outputAsJSON(baos, results);

    qe.close();

    String jsonResults = baos.toString();
    jsonResults = jsonResults.replaceAll("http://www.sintef.no/pssn#", "");

    JSONObject jsonResponse = new JSONObject();
    JSONArray mouldArray = new JSONArray();

    ObjectMapper mapper = new ObjectMapper();
    try {
        JsonNode rootNode = mapper.readTree(jsonResults);
        JsonNode resultsNode = rootNode.path("results");
        JsonNode bindingsNode = resultsNode.path("bindings");
        Iterator<JsonNode> iterator = bindingsNode.getElements();
        while (iterator.hasNext()) {
            JsonNode xNode = iterator.next();
            List<String> valueNode = xNode.findValuesAsText("value");

            mouldArray.put(valueNode.get(0));
        }
    } catch (IOException e) {
        System.out.println(e.getClass().getName() + ": " + e.getMessage());
    }

    jsonResponse.put("mould", mouldArray);

    String result = jsonResponse.toString(2);

    // Return HTTP response 200 in case of success
    return Response.status(200).entity(result).build();
}
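
querySensorList and queryMouldList above differ only in the SPARQL query and the JSON key of the response, so the shared body could be factored into a helper along these lines. This is a sketch, not code from the project; queryList is hypothetical and reuses the class's existing helpers and imports:

// Hypothetical helper inside the same class, mirroring the parsing logic above.
private Response queryList(String dataset, String sparql, String jsonKey) {
    String endpointUrl = getFusekiSparqlEndpointUrl(dataset);

    QueryExecution qe = QueryExecutionFactory.sparqlService(endpointUrl, sparql);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ResultSetFormatter.outputAsJSON(baos, qe.execSelect());
    qe.close();

    String jsonResults = baos.toString().replaceAll("http://www.sintef.no/pssn#", "");

    JSONArray values = new JSONArray();
    ObjectMapper mapper = new ObjectMapper();
    try {
        JsonNode bindingsNode = mapper.readTree(jsonResults).path("results").path("bindings");
        Iterator<JsonNode> iterator = bindingsNode.getElements();
        while (iterator.hasNext()) {
            values.put(iterator.next().findValuesAsText("value").get(0));
        }
    } catch (IOException e) {
        System.out.println(e.getClass().getName() + ": " + e.getMessage());
    }

    JSONObject jsonResponse = new JSONObject();
    jsonResponse.put(jsonKey, values);

    // Return HTTP response 200 in case of success
    return Response.status(200).entity(jsonResponse.toString(2)).build();
}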