Example usage for org.apache.solr.util RefCounted decref

List of usage examples for org.apache.solr.util RefCounted decref

Introduction

On this page you can find example usages of org.apache.solr.util RefCounted.decref.

Prototype

public void decref() 

Source Link

Usage

From source file:com.billiger.solr.handler.component.QLTBComponent.java

License:Apache License

/**
 * Inform component of core reload./*ww w. j ava2  s. c om*/
 *
 * This will both set the analyzer according to the configured
 * queryFieldType, and load the QLTB data. Data source can be (in this
 * order) ZooKeeper, the conf/ directory or the data/ directory.
 */
@Override
public final void inform(final SolrCore core) {
    // load analyzer
    String queryFieldType = initArgs.get(FIELD_TYPE);
    if (queryFieldType != null) {
        FieldType ft = core.getLatestSchema().getFieldTypes().get(queryFieldType);
        if (ft == null) {
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                    "unknown FieldType \"" + queryFieldType + "\" used in QLTBComponent");
        }
        analyzer = ft.getQueryAnalyzer();
    } else {
        analyzer = null;
    }
    synchronized (qltbCache) {
        qltbCache.clear();
        try {
            // retrieve QLTB data filename
            String qltbFile = initArgs.get(QLTB_FILE);
            if (qltbFile == null) {
                throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                        "QLTBComponent must specify argument: \"" + QLTB_FILE + "\" - path to QLTB data");
            }
            boolean exists = false;
            // check ZooKeeper
            ZkController zkController = core.getCoreDescriptor().getCoreContainer().getZkController();
            if (zkController != null) {
                exists = zkController.configFileExists(zkController.readConfigName(
                        core.getCoreDescriptor().getCloudDescriptor().getCollectionName()), qltbFile);
            } else {
                // no ZooKeeper, check conf/ and data/ directories
                File fConf = new File(core.getResourceLoader().getConfigDir(), qltbFile);
                File fData = new File(core.getDataDir(), qltbFile);
                if (fConf.exists() == fData.exists()) {
                    // both or neither exist
                    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                            "QLTBComponent missing config file: \"" + qltbFile + "\": either "
                                    + fConf.getAbsolutePath() + " or " + fData.getAbsolutePath()
                                    + " must exist, but not both");
                }
                if (fConf.exists()) {
                    // conf/ found, load it
                    exists = true;
                    log.info("QLTB source conf/: " + fConf.getAbsolutePath());
                    Config cfg = new Config(core.getResourceLoader(), qltbFile);
                    qltbCache.put(null, loadQLTBMap(cfg, core));
                }
            }
            if (!exists) {
                // Neither ZooKeeper nor conf/, so must be in data/
                // We need an IndexReader and the normal
                RefCounted<SolrIndexSearcher> searcher = null;
                try {
                    searcher = core.getNewestSearcher(false);
                    IndexReader reader = searcher.get().getIndexReader();
                    getQLTBMap(reader, core);
                } finally {
                    if (searcher != null) {
                        searcher.decref();
                    }
                }
            }
        } catch (Exception ex) {
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error initializing QltbComponent.",
                    ex);
        }
    }
}

From source file:com.grantingersoll.intell.clustering.KMeansClusteringEngine.java

License:Apache License

/**
 * Answers clustering requests: publishes finished background cluster jobs,
 * lists clusters/points on demand, and optionally kicks off a new build.
 *
 * @param params request parameters (BUILD, LIST_CLUSTERS, INCLUDE_POINTS, ...)
 * @return a NamedList describing clusters and/or their member points
 */
@Override
public NamedList cluster(SolrParams params) {
    NamedList result = new NamedList();
    // Check whether the background clustering job has produced new results.
    try {
        if (theFuture != null) {
            // see if we have new results, but don't wait too long for them
            ClusterJob job = theFuture.get(1, TimeUnit.MILLISECONDS);
            if (lastSuccessful != null) {
                // clean up the old ones
                // TODO: clean up the old dirs before switching lastSuccessful
            }
            lastSuccessful = job;
            theFuture = null;
        }
    } catch (InterruptedException e) {
        // restore interrupt status so callers/pool threads can see it
        Thread.currentThread().interrupt();
        log.error("Exception", e);
    } catch (ExecutionException e) {
        log.error("Exception", e);
    } catch (TimeoutException e) {
        log.error("Exception", e);
    }
    if (lastSuccessful != null) { // we have clusters
        // do we need the points?
        boolean includePoints = params.getBool(INCLUDE_POINTS, false);
        int clusterId = params.getInt(LIST_POINTS, Integer.MIN_VALUE);
        Map<Integer, List<String>> toPoints = lastSuccessful.clusterIdToPoints;
        String docId = params.get(IN_CLUSTER);
        if ((includePoints || clusterId != Integer.MIN_VALUE || docId != null) && toPoints == null) {
            // load the points lazily from the job directory
            try {
                toPoints = readPoints(new Path(lastSuccessful.jobDir + File.separator + "points"),
                        lastSuccessful.conf);
            } catch (IOException e) {
                // keep the cause so the root failure is visible to callers
                throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                        "Unable to load points: " + lastSuccessful, e);
            }
        }
        // default=false so a missing parameter doesn't NPE on unboxing
        if (params.getBool(LIST_CLUSTERS, false)) {
            NamedList nl = new NamedList();
            result.add("all", nl);

            Map<Integer, Cluster> clusterMap = lastSuccessful.clusters;
            if (clusterMap == null) {
                // we aren't caching, so load 'em up
                try {
                    clusterMap = loadClusters(lastSuccessful);
                } catch (Exception e) {
                    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                            "unable to load the clusters from " + lastSuccessful, e);
                }
            }

            for (Cluster cluster : clusterMap.values()) {
                NamedList clusterNL = new NamedList();
                nl.add(String.valueOf(cluster.getId()), clusterNL);
                clusterNL.add("numPoints", cluster.getNumPoints());
                // TODO: better format?
                clusterNL.add("center", cluster.getCenter().asFormatString());
                if (cluster.getRadius() != null) {
                    clusterNL.add("radius", cluster.getRadius().asFormatString());
                }
                if (includePoints) {
                    List<String> points = toPoints.get(cluster.getId());
                    clusterNL.add("points", points);
                }
            }
        }

        if (docId != null) {
            // TODO: report which cluster(s) contain docId
        }
        // TODO: support sending in multiple ids

        if (clusterId != Integer.MIN_VALUE) {
            List<String> points = lastSuccessful.clusterIdToPoints.get(clusterId);
            if (points != null) {
                result.add(String.valueOf(clusterId), points);
            }
        }
    } else if (params.getBool(BUILD, false)) {
        RefCounted<SolrIndexSearcher> refCnt = core.getSearcher();
        try {
            int theK = params.getInt(K, 10);
            cluster(refCnt.get(), theK);
        } finally {
            // always release the searcher reference, even if clustering fails
            refCnt.decref();
        }
    }
    return result;
}

From source file:examples.adsabs.BlackBoxFailingRecords.java

License:Apache License

/**
 * End-to-end test of failing-record handling: runs a full import with
 * injected per-record failures, then exercises the /invenio-doctor
 * handler's info/detailed-info/start/reset/discover/force-reindexing
 * commands, verifying queue sizes, failure counts and recovered docs
 * at each stage.
 */
public void testImport() throws Exception {

    WaitingDataImportHandler handler = (WaitingDataImportHandler) h.getCore()
            .getRequestHandler("/invenio/import");
    SolrCore core = h.getCore();

    // Baseline: import recids 1..104 with no injected failures.
    SolrQueryRequest req = req("command", "full-import", "commit", "true", "url",
            "python://search?p=" + URLEncoder.encode("recid:1->104", "UTF-8"));
    SolrQueryResponse rsp = new SolrQueryResponse();
    core.execute(handler, req, rsp);

    assertU(commit());
    assertQ(req("q", "*:*", "fl", "recid,title"), "//*[@numFound='22']");
    assertQ(req("q", "id:84"), "//*[@numFound='1']");
    assertQ(req("q", "id:78"), "//*[@numFound='1']");
    assertQ(req("q", "abstract:\"Hubbard-Stratonovich\""), "//*[@numFound='1']");

    // clean the slate
    assertU(delQ("*:*"));
    assertU(commit());
    assertQ(req("q", "*:*"), "//*[@numFound='0']");

    // Inject failures for these two record ids on the next import run.
    failThis.put("78", true);
    failThis.put("84", true);

    req = req("command", "full-import", "commit", "true", "writerImpl", TestFailingWriter.class.getName(),
            "url", "python://search?p=" + URLEncoder.encode("recid:1->60 OR recid:61->104", "UTF-8"));
    rsp = new SolrQueryResponse();
    core.execute(handler, req, rsp);

    // The doctor should have queued the failed batches but not started yet.
    assertQ(req("qt", "/invenio-doctor", "command", "info"), "//str[@name='queueSize'][.='3']",
            "//str[@name='failedRecs'][.='0']", "//str[@name='failedBatches'][.='0']",
            "//str[@name='failedTotal'][.='0']", "//str[@name='registeredRequests'][.='3']",
            "//str[@name='restartedRequests'][.='0']", "//str[@name='docsToCheck'][.='103']",
            "//str[@name='status'][.='idle']");
    assertQ(req("qt", "/invenio-doctor", "command", "detailed-info"), "//str[@name='queueSize'][.='3']",
            "//str[@name='failedRecs'][.='0']", "//str[@name='failedBatches'][.='0']",
            "//str[@name='failedTotal'][.='0']", "//str[@name='registeredRequests'][.='3']",
            "//str[@name='restartedRequests'][.='0']", "//str[@name='docsToCheck'][.='103']",
            "//str[@name='status'][.='idle']", "*[count(//arr[@name='toBeDone']/str)=3]",
            "*[count(//arr[@name='failedBatches']/str)=0]");

    // Let the doctor work through the queue; poll until it goes idle.
    InvenioDoctor doctor = (InvenioDoctor) h.getCore().getRequestHandler("/invenio-doctor");
    req = req("command", "start");
    rsp = new SolrQueryResponse();
    core.execute(doctor, req, rsp);

    while (doctor.isBusy()) {
        assertQ(req("qt", "/invenio-doctor", "command", "info"), "//str[@name='status'][.='busy']");
        Thread.sleep(300);
    }

    assertQ(req("qt", "/invenio-doctor", "command", "info"), "//str[@name='status'][.='idle']");

    // Records 78 and 84 were forced to fail; their neighbours made it in.
    assertQ(req("q", "*:*"), "//*[@numFound='20']");
    assertQ(req("q", "id:84"), "//*[@numFound='0']");
    assertQ(req("q", "id:83"), "//*[@numFound='1']");
    assertQ(req("q", "id:85"), "//*[@numFound='1']");

    assertQ(req("q", "id:78"), "//*[@numFound='0']");
    assertQ(req("q", "id:77"), "//*[@numFound='1']");
    assertQ(req("q", "id:79"), "//*[@numFound='1']");

    // Retained for ad-hoc debugging (println below is commented out).
    String response = h.query("/invenio-doctor", req("qt", "/invenio-doctor", "command", "detailed-info"));

    //System.out.println(response);

    assertQ(req("qt", "/invenio-doctor", "command", "info"), "//str[@name='queueSize'][.='0']",
            "//str[@name='failedRecs'][.='2']", "//str[@name='failedBatches'][.='0']",
            "//str[@name='failedTotal'][.='2']", "//str[@name='registeredRequests'][.='21']",
            "//str[@name='restartedRequests'][.='21']", "//str[@name='docsToCheck'][.='0']",
            "//str[@name='status'][.='idle']");

    assertQ(req("qt", "/invenio-doctor", "command", "reset"));
    assertQ(req("qt", "/invenio-doctor", "command", "info"), "//str[@name='queueSize'][.='0']",
            "//str[@name='failedRecs'][.='0']");

    // now we expect the least possible number of restarts
    req = req("command", "full-import", "commit", "true", "writerImpl", TestFailingWriter.class.getName(),
            "url", "python://search?p=" + URLEncoder.encode("recid:82->86", "UTF-8"));
    rsp = new SolrQueryResponse();
    core.execute(handler, req, rsp);

    assertQ(req("qt", "/invenio-doctor", "command", "info"), "//str[@name='queueSize'][.='3']",
            "//str[@name='failedRecs'][.='0']", "//str[@name='failedBatches'][.='0']",
            "//str[@name='failedTotal'][.='0']", "//str[@name='registeredRequests'][.='3']",
            "//str[@name='restartedRequests'][.='0']", "//str[@name='docsToCheck'][.='3']",
            "//str[@name='status'][.='idle']");

    req = req("command", "start");
    rsp = new SolrQueryResponse();
    core.execute(doctor, req, rsp);

    while (doctor.isBusy()) {
        Thread.sleep(300);
    }

    // Only recid 84 is still failing in this narrow range.
    assertQ(req("qt", "/invenio-doctor", "command", "info"), "//str[@name='queueSize'][.='0']",
            "//str[@name='failedRecs'][.='1']", "//str[@name='failedBatches'][.='0']",
            "//str[@name='failedTotal'][.='1']", "//str[@name='registeredRequests'][.='3']",
            "//str[@name='restartedRequests'][.='3']", "//str[@name='docsToCheck'][.='0']",
            "//str[@name='status'][.='idle']");

    // now test the new component which looks inside the invenio db
    // discovers the missing recs and calls indexing on them

    assertU(commit());
    assertQ(req("q", "recid:77 OR recid:80"), "//*[@numFound='2']");
    assertU(delQ("recid:77 OR recid:80"));

    // this is necessary, otherwise the results may be wrong because
    // lucene cache is used to compare records (and the cache may
    // contain deleted records even after the commit)
    assertU(commit("expungeDeletes", "true"));
    assertQ(req("q", "recid:77 OR recid:80"), "//*[@numFound='0']");

    assertQ(req("qt", "/invenio-doctor", "command", "info"), "//str[@name='queueSize'][.='0']",
            "//str[@name='failedRecs'][.='1']", "//str[@name='failedBatches'][.='0']",
            "//str[@name='failedTotal'][.='1']", "//str[@name='registeredRequests'][.='3']",
            "//str[@name='restartedRequests'][.='3']", "//str[@name='docsToCheck'][.='0']",
            "//str[@name='status'][.='idle']");

    req = req("command", "discover", "params", "batchSize=50");
    rsp = new SolrQueryResponse();
    core.execute(doctor, req, rsp);

    while (doctor.isBusy()) {
        Thread.sleep(300);
    }

    req = req("command", "start");
    rsp = new SolrQueryResponse();
    core.execute(doctor, req, rsp);

    while (doctor.isBusy()) {
        Thread.sleep(300);
    }

    assertU(commit());

    // Discovery found and re-indexed the two deleted records.
    assertQ(req("q", "recid:77 OR recid:80"), "//*[@numFound='2']");

    assertQ(req("qt", "/invenio-doctor", "command", "info"), "//str[@name='queueSize'][.='0']",
            "//str[@name='failedRecs'][.='1']", "//str[@name='failedBatches'][.='0']",
            "//str[@name='failedTotal'][.='1']", "//str[@name='registeredRequests'][.='7']",
            "//str[@name='restartedRequests'][.='7']", "//str[@name='docsToCheck'][.='0']",
            "//str[@name='status'][.='idle']");

    assertQ(req("qt", "/invenio-doctor", "command", "show-missing"), "//str[@name='queueSize'][.='0']",
            "//str[@name='failedRecs'][.='1']", "//str[@name='failedBatches'][.='0']",
            "//str[@name='failedTotal'][.='1']", "//str[@name='registeredRequests'][.='7']",
            "//str[@name='restartedRequests'][.='7']", "//str[@name='docsToCheck'][.='0']",
            "//str[@name='status'][.='idle']", "//arr[@name='missingRecs']/int[.='77']",
            "//arr[@name='missingRecs']/int[.='80']");

    // Same discovery, but with tiny batch/fetch limits.
    assertQ(req("qt", "/invenio-doctor", "command", "reset"), null);
    assertQ(req("qt", "/invenio-doctor", "command", "discover", "params",
            "batchSize=1&fetch_size=2&max_records=1"), null);
    req = req("command", "start");
    rsp = new SolrQueryResponse();
    core.execute(doctor, req, rsp);

    while (doctor.isBusy()) {
        Thread.sleep(300);
    }

    assertU(commit());
    assertQ(req("q", "recid:77 OR recid:80"), "//*[@numFound='2']");

    /*
    assertQ(req("qt", "/invenio-doctor", "command", "detailed-info"), 
      "//str[@name='registeredRequests'][.='9']",
      "//str[@name='restartedRequests'][.='9']",
      "//str[@name='status'][.='idle']"
    );
     */

    // verify that deleted recs are discovered
    //MontySolrVM.INSTANCE.evalCommand("sys.stderr.write(str(self._handler._db['*:create_record'].__module__))");
    //MontySolrVM.INSTANCE.evalCommand("sys.stderr.write(sys.modules['monty_invenio.tests.demotest_updating'].__file__ + '\\n')");
    PythonMessage message = MontySolrVM.INSTANCE.createMessage("create_record").setParam("diff", 5)
            .setParam("data", "970:a:bibcodeXXXXXXXXXXXX");
    MontySolrVM.INSTANCE.sendMessage(message);
    Integer added = (Integer) message.getResults();
    tempRecids.add(added);

    assertQ(req("qt", "/invenio-doctor", "command", "discover"), null);
    req = req("command", "start");
    rsp = new SolrQueryResponse();
    core.execute(doctor, req, rsp);
    while (doctor.isBusy()) {
        Thread.sleep(300);
    }

    assertU(commit());
    assertQ(req("q", "recid:" + added), "//*[@numFound='1']");

    // Delete the record on the invenio side; discovery must remove it here.
    message = MontySolrVM.INSTANCE.createMessage("delete_record").setParam("recid", added).setParam("diff", 7);
    MontySolrVM.INSTANCE.sendMessage(message);

    assertQ(req("qt", "/invenio-doctor", "command", "discover"), null);
    req = req("command", "start");
    rsp = new SolrQueryResponse();
    core.execute(doctor, req, rsp);
    while (doctor.isBusy()) {
        Thread.sleep(300);
    }

    assertU(commit());
    assertQ(req("q", "recid:" + added), "//*[@numFound='0']");

    // now delete records inside solr and see whether the doctor can
    // discover them and recover them

    assertU(delQ("*:*"));
    assertU(commit());
    assertQ(req("q", "*:*"), "//*[@numFound='0']");

    // No more injected failures from here on.
    failThis.clear();

    req = req("command", "discover");
    rsp = new SolrQueryResponse();
    core.execute(doctor, req, rsp);

    while (doctor.isBusy()) {
        Thread.sleep(300);
    }

    req = req("command", "start");
    rsp = new SolrQueryResponse();
    core.execute(doctor, req, rsp);

    while (doctor.isBusy()) {
        Thread.sleep(300);
    }

    assertU(commit());

    assertQ(req("q", "*:*"), "//*[@numFound='22']");

    // check that force-reindexing will update recs
    RefCounted<SolrIndexSearcher> searcher = h.getCore().getSearcher();
    SolrIndexSearcher s = searcher.get();
    Document doc77 = s.doc(s.search(new TermQuery(new Term("recid", "77")), 1).scoreDocs[0].doc);
    String is = doc77.get("indexstamp");

    req = req("command", "force-reindexing");
    rsp = new SolrQueryResponse();
    core.execute(doctor, req, rsp);

    while (doctor.isBusy()) {
        Thread.sleep(300);
    }

    req = req("command", "start");
    rsp = new SolrQueryResponse();
    core.execute(doctor, req, rsp);

    while (doctor.isBusy()) {
        Thread.sleep(300);
    }
    assertU(commit());

    Document doc77b = s.doc(s.search(new TermQuery(new Term("recid", "77")), 1).scoreDocs[0].doc);
    String is2 = doc77b.get("indexstamp");

    assertQ(req("q", "*:*"), "//*[@numFound='22']");
    assertTrue("Docs were not re-indexed", !is.equals(is2));

    // NOTE(review): decref() normally manages the searcher's lifecycle;
    // calling close() explicitly before decref() looks like a potential
    // double-close — confirm this is intended. Also, these release calls
    // are not in a finally block, so a failed assertion above leaks the
    // searcher reference.
    s.close();
    searcher.decref();

}

From source file:jp.sf.fess.solr.plugin.update.DocValueUpdateHandlerFilter.java

License:Apache License

/**
 * Applies a numeric doc-value update when the request carries the
 * extended update command; otherwise delegates the add to the filter chain.
 *
 * @param cmd   the add command (may be a block/nested update)
 * @param chain downstream update handler chain
 * @return 1 when the extended update path was taken, otherwise whatever
 *         the chain returns
 * @throws IOException on index writer failures
 */
@Override
public int addDoc(final AddUpdateCommand cmd, final UpdateHandlerFilterChain chain) throws IOException {
    final String exCmd = cmd.getReq().getParams().get(EXTENDED_CMD);
    if (!UPDATE_CMD.equals(exCmd)) {
        // Not our extended command: follow the normal add path.
        return chain.addDoc(cmd);
    }

    final String termName = cmd.getReq().getParams().get(TERM_PARAM);
    if (termName == null) {
        throw new IllegalArgumentException("term is not specified.");
    }

    int rc = -1;
    final RefCounted<IndexWriter> iw = updateHandler.getSolrCoreState()
            .getIndexWriter(updateHandler.getSolrCore());
    try {
        final IndexWriter writer = iw.get();

        if (cmd.isBlock()) {
            // Block (nested) update: apply the change to every child document.
            for (final Iterable<? extends IndexableField> childDoc : cmd) {
                updateNumericValue(writer, childDoc, termName);
            }
        } else {
            updateNumericValue(writer, cmd.getLuceneDocument(), termName);
        }
        rc = 1;
    } finally {
        // Release the writer reference in all cases.
        iw.decref();
    }
    return rc;
}

From source file:net.yacy.cora.federate.solr.connector.EmbeddedSolrConnector.java

License:Open Source License

/**
 * Returns the number of live (non-deleted) documents in the index.
 */
@Override
public long getSize() {
    // Borrow the current searcher to read the live document count.
    RefCounted<SolrIndexSearcher> refCountedIndexSearcher = this.core.getSearcher();
    try {
        SolrIndexSearcher searcher = refCountedIndexSearcher.get();
        DirectoryReader reader = searcher.getIndexReader();
        return reader.numDocs();
    } finally {
        // Release the reference even if reading the count fails;
        // the original leaked it on exception.
        refCountedIndexSearcher.decref();
    }
}

From source file:org.alfresco.solr.AbstractAlfrescoSolrTests.java

License:Open Source License

/**
 * Polls the index until {@code query} matches {@code expectedNumFound}
 * documents, or {@code waitMillis} elapses.
 *
 * @param query            query to run against the core's searcher
 * @param expectedNumFound hit count to wait for
 * @param waitMillis       maximum time to wait, in milliseconds
 * @throws Exception if the expected count is not reached in time
 */
public static void waitForDocCount(Query query, long expectedNumFound, long waitMillis) throws Exception {
    long timeout = new Date().getTime() + waitMillis;

    int totalHits = 0;
    while (new Date().getTime() < timeout) {
        // Acquire the searcher OUTSIDE the try: the original acquired it
        // inside, so a failing getSearcher() made the finally block either
        // NPE (first pass) or double-decref a previously released searcher.
        RefCounted<SolrIndexSearcher> ref = getCore().getSearcher();
        try {
            SolrIndexSearcher searcher = ref.get();
            TopDocs topDocs = searcher.search(query, 10);
            totalHits = topDocs.totalHits;
            if (topDocs.totalHits == expectedNumFound) {
                return;
            }
            Thread.sleep(2000);
        } finally {
            ref.decref();
        }
    }
    throw new Exception(
            "Wait error expected " + expectedNumFound + " found " + totalHits + " : " + query.toString());
}

From source file:org.alfresco.solr.AlfrescoCoreAdminTester.java

License:Open Source License

/**
 * Runs the authority-related sanity queries and appends their results to
 * the response under the "Auth" key.
 */
private void checkAuth(SolrQueryResponse rsp, SolrCore core, AlfrescoSolrDataModel dataModel, long count)
        throws IOException, org.apache.lucene.queryparser.classic.ParseException {
    NamedList<Object> report = new SimpleOrderedMap<Object>();
    rsp.add("Auth", report);
    RefCounted<SolrIndexSearcher> refCounted = null;
    try {
        refCounted = core.getSearcher(false, true, null);
        SolrIndexSearcher solrIndexSearcher = refCounted.get();

        // Full-text sanity checks, each expected to match `count` docs.
        String[] ftsQueries = { "TEXT:\"Test\"", "TEXT:\"doc\"", "TEXT:\"number\"", "TEXT:\"1\"",
                "AUTHORITY:\"READER-0\"" };
        for (String ftsQuery : ftsQueries) {
            testFTSQuery(dataModel, report, solrIndexSearcher, ftsQuery, (int) count, null, null, null);
        }

        // Authority query sizes; the repeated large values are intentional
        // (the original ran 10000 and 20000 three times each).
        int[] authoritySizes = { 8, 9, 10, 98, 99, 100, 998, 999, 1000, 9998, 9999, 10000, 10000, 10000,
                20000, 20000, 20000 };
        for (int size : authoritySizes) {
            buildAndRunAuthQuery(dataModel, count, report, solrIndexSearcher, size);
        }
        // buildAndRunAuthQuery(dataModel, count, report, solrIndexSearcher, 100000);
        // buildAndRunAuthQuery(dataModel, count, report, solrIndexSearcher, 1000000);
        // buildAndRunAuthQuery(dataModel, count, report, solrIndexSearcher, 10000000);
    } finally {
        if (refCounted != null) {
            refCounted.decref();
        }
    }
}

From source file:org.alfresco.solr.AlfrescoCoreAdminTester.java

License:Open Source License

/**
 * Verifies that PATH queries match child association names that require
 * ISO9075 escaping: one local name packed with punctuation/control
 * characters, and one starting with digits.
 *
 * @param after       report container the results are appended to
 * @param core        the Solr core to index into and query
 * @param dataModel   data model used to build the test queries
 * @param rootNodeRef parent node for the escaped children
 * @throws IOException
 * @throws org.apache.lucene.queryparser.classic.ParseException 
 */
private void testChildNameEscaping(NamedList<Object> after, SolrCore core, AlfrescoSolrDataModel dataModel,
        NodeRef rootNodeRef) throws IOException, org.apache.lucene.queryparser.classic.ParseException {
    // Local name containing punctuation, tab/newline, NUL and a literal backslash.
    String COMPLEX_LOCAL_NAME = "\u0020\u0060\u00ac\u00a6\u0021\"\u00a3\u0024\u0025\u005e\u0026\u002a\u0028\u0029\u002d\u005f\u003d\u002b\t\n\\\u0000\u005b\u005d\u007b\u007d\u003b\u0027\u0023\u003a\u0040\u007e\u002c\u002e\u002f\u003c\u003e\u003f\\u007c\u005f\u0078\u0054\u0036\u0035\u0041\u005f";
    // Local name beginning with digits (also needs ISO9075 encoding).
    String NUMERIC_LOCAL_NAME = "12Woof12";

    // Child node whose association QName uses the complex local name;
    // the path variant stores the ISO9075-encoded form.
    NodeRef childNameEscapingNodeRef = new NodeRef(new StoreRef("workspace", "SpacesStore"), createGUID());
    QName childNameEscapingQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI,
            COMPLEX_LOCAL_NAME);
    QName pathChildNameEscapingQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI,
            ISO9075.encode(COMPLEX_LOCAL_NAME));
    ChildAssociationRef complexCAR = new ChildAssociationRef(ContentModel.ASSOC_CHILDREN, rootNodeRef,
            childNameEscapingQName, childNameEscapingNodeRef, true, 0);
    addNode(core, dataModel, 1, 17, 1, testSuperType, null, null, null, "system",
            new ChildAssociationRef[] { complexCAR }, new NodeRef[] { rootNodeRef },
            new String[] { "/" + pathChildNameEscapingQName.toString() }, childNameEscapingNodeRef, true);

    // Second child with the digit-leading local name.
    NodeRef numericNameEscapingNodeRef = new NodeRef(new StoreRef("workspace", "SpacesStore"), createGUID());
    QName numericNameEscapingQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI,
            NUMERIC_LOCAL_NAME);
    QName pathNumericNameEscapingQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI,
            ISO9075.encode(NUMERIC_LOCAL_NAME));
    ChildAssociationRef numericCAR = new ChildAssociationRef(ContentModel.ASSOC_CHILDREN, rootNodeRef,
            numericNameEscapingQName, numericNameEscapingNodeRef, true, 0);
    addNode(core, dataModel, 1, 18, 1, testSuperType, null, null, null, "system",
            new ChildAssociationRef[] { numericCAR }, new NodeRef[] { rootNodeRef },
            new String[] { "/" + pathNumericNameEscapingQName.toString() }, numericNameEscapingNodeRef, true);

    NamedList<Object> report = new SimpleOrderedMap<Object>();
    after.add("TestChildNameEscaping", report);
    RefCounted<SolrIndexSearcher> refCounted = null;
    try {
        refCounted = core.getSearcher(false, true, null);
        SolrIndexSearcher solrIndexSearcher = refCounted.get();

        // Each escaped path must resolve to exactly one node.
        testQuery(dataModel, report, solrIndexSearcher,
                "PATH:\"/cm:" + ISO9075.encode(COMPLEX_LOCAL_NAME) + "\"", 1);
        testQuery(dataModel, report, solrIndexSearcher,
                "PATH:\"/cm:" + ISO9075.encode(NUMERIC_LOCAL_NAME) + "\"", 1);
    } finally {
        if (refCounted != null) {
            refCounted.decref();
        }
    }
}

From source file:org.alfresco.solr.AlfrescoCoreAdminTester.java

License:Open Source License

/**
 * Verifies the root node is reachable via both the "/" and "/." PATH
 * queries, recording the results under the "RootNode" report key.
 */
private void checkRootNode(NamedList<Object> before, SolrCore core, AlfrescoSolrDataModel dataModel)
        throws IOException, org.apache.lucene.queryparser.classic.ParseException {
    NamedList<Object> report = new SimpleOrderedMap<Object>();
    before.add("RootNode", report);
    RefCounted<SolrIndexSearcher> refCounted = null;
    try {
        refCounted = core.getSearcher(false, true, null);
        SolrIndexSearcher searcher = refCounted.get();

        // Both spellings of the root path must match exactly one document.
        for (String rootPathQuery : new String[] { "PATH:\"/\"", "PATH:\"/.\"" }) {
            testQuery(dataModel, report, searcher, rootPathQuery, 1);
        }
    } finally {
        if (refCounted != null) {
            refCounted.decref();
        }
    }
}

From source file:org.alfresco.solr.AlfrescoCoreAdminTester.java

License:Open Source License

/**
 * Verifies QNAME and association-type QNAME queries return the expected
 * hit counts, recording the results under the "QNames" report key.
 */
private void checkQNames(NamedList<Object> before, SolrCore core, AlfrescoSolrDataModel dataModel)
        throws IOException, org.apache.lucene.queryparser.classic.ParseException {
    NamedList<Object> report = new SimpleOrderedMap<Object>();
    before.add("QNames", report);
    RefCounted<SolrIndexSearcher> refCounted = null;
    try {
        refCounted = core.getSearcher(false, true, null);
        SolrIndexSearcher searcher = refCounted.get();

        // Pairs of (query string, expected hit count), run in order.
        Object[][] expectations = {
                { "QNAME:\"nine\"", 1 },
                { "PRIMARYASSOCTYPEQNAME:\"cm:contains\"", 11 },
                { "PRIMARYASSOCTYPEQNAME:\"sys:children\"", 4 },
                { "ASSOCTYPEQNAME:\"cm:contains\"", 11 },
                { "ASSOCTYPEQNAME:\"sys:children\"", 5 } };
        for (Object[] expectation : expectations) {
            testQuery(dataModel, report, searcher, (String) expectation[0], (Integer) expectation[1]);
        }
    } finally {
        if (refCounted != null) {
            refCounted.decref();
        }
    }
}