List of usage examples for org.apache.solr.client.solrj.impl HttpSolrClient close
@Override public void close() throws IOException
From source file:com.fline.hadoop.data.client.DataTransporter.java
License:Apache License
/** * trans rdb data to hbase./*from w w w .j a va 2 s . c o m*/ * * @param connectionurl * rdb url * @param driver * rdb driver class. such as "com.mysql.jdbc.Driver" * @param username * rdb login username * @param password * rdb login password * @param tablename * rdb table. * @param rdbcolumns * rdb table column which selected to write to hbase * @param partitioncolumn * the column which can be used to select data by set a start * value and end value * @param linenum * record num * @param hbasetable * output hbase table name * @param hbasecolumns * the columns corresponding to rdb columns * @param rowkeyparam * hbase rowkey generate param. * @param solrmasterurl * such as http://fdp-master:8983/solr/ * @param label * solr label used for search * @param listener * @throws Exception */ public static void transRDB2HBASEWithIndexOnSolr(String connectionurl, String driver, String username, String password, String tablename, String rdbcolumns, String partitioncolumn, int linenum, String hbasetable, String hbasecolumns, String rowkeyparam, String solrmasterurl, String label, DataProgressListener listener) throws Exception { HashMap<String, String> rdbconfigMap = new HashMap<String, String>(); rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_CONNECTIONSTRING, connectionurl); rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_DRIVER, driver); rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_USERNAME, username); rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_USERPASSWD, password); // configMap.put(RDBInputDataSource.CONFIG_JDBC_SCHEMA, ""); rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_COLUMNS, rdbcolumns); // rdbconfigMap.put(CONFIG_MAPNUM, "2000"); // rdbconfigMap.put("fromJobConfig.sql", "1=1 limit " + linenum); rdbconfigMap.put(CONFIG_MAPNUM, String.valueOf(linenum / 1000 + 1)); System.out.println("config_mapnum*********************:" + linenum); if (linenum <= 0) { rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_TABLE, tablename); } else { 
rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_TABLE, "(select * from " + tablename + " limit " + linenum + " ) as temptable"); } rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_PARTITIONCOLUMN, partitioncolumn); InputDataSource rdb = InputDataSourceFactory.createInputDataSource(Constant.RDBS_DATASOURCE, rdbconfigMap); // HBASE CONFIG HashMap<String, String> hbaseconfigMap = new HashMap<String, String>(); Configuration conf = new Configuration(); conf.addResource(new FileInputStream( DataTransporter.class.getClassLoader().getResource("").getPath() + "hbase-site.xml")); hbaseconfigMap.put(HBaseDataSource.CONFIG_HBASE_ZOOKEEPERLIST, conf.get("hbase.zookeeper.quorum")); hbaseconfigMap.put(HBaseDataSource.CONFIG_HBASE_ZKNODE, conf.get("zookeeper.znode.parent")); hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_TABLENAME, hbasetable); hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_COLUMNSMAP, hbasecolumns); hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_ROWKEYGENEREATEDWAY, HBaseOutputDataSource.ROWKEY_GENERATED_BY_NORMAL); hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_ROWKEYPARAMS, rowkeyparam); OutputDataSource hbase = OutputDataSourceFactory.createOutputDataSource(Constant.HBASE_DATASOURCE, hbaseconfigMap); // solr meta store HttpSolrClient metaclient = new HttpSolrClient(solrmasterurl + "core_for_Meta"); List<SolrInputDocument> sidocs = new ArrayList<SolrInputDocument>(); SolrInputDocument sidoc = new SolrInputDocument(); sidoc.addField("rdbtablename", tablename); sidoc.addField("rdbtablecols", generateDynamicSolrColumns(rdbcolumns)); sidocs.add(sidoc); metaclient.add(sidocs); metaclient.commit(); metaclient.close(); // solr config String rdbname = null; if (driver.contains("mysql")) { rdbname = connectionurl.substring(connectionurl.lastIndexOf('/') + 1); } else if (driver.contains("oracle")) { rdbname = connectionurl.substring(connectionurl.lastIndexOf(':') + 1); } HashMap<String, String> solrconfig = new HashMap<String, String>(); 
solrconfig.put(SolrDataSource.CONFIG_SOLR_MASTERURL, solrmasterurl); solrconfig.put(SolrOutputDataSource.CONFIG_SOLR_COLUMNS, "label=" + label + ",linecount=" + linenum + ",rdbname=" + rdbname + ",rdbtablename=" + hbasetable + ",createdTime=" + System.currentTimeMillis() + ",sourceType=2@di_v@" + generateDynamicSolrColumns(rdbcolumns)); System.out.println(SolrOutputDataSource.CONFIG_SOLR_COLUMNS + "\t" + "label=" + label + ",linecount=" + linenum + ",rdbname=" + rdbname + ",rdbtablename=" + tablename + ",createdTime=" + System.currentTimeMillis() + ",sourceType=2@di_v@" + generateDynamicSolrColumns(rdbcolumns)); solrconfig.put(SolrOutputDataSource.CONFIG_SOLR_INSTANCE, "core_for_searchDB"); OutputDataSource solr = new SolrOutputDataSource(solrconfig, Constant.SOLR_DATASOURCE_NAME); DataTransporter.createTransJob(rdb, hbase, null); DataTransporter.createTransJob(rdb, solr, listener); }
From source file:com.fline.hadoop.data.client.DataTransporter.java
License:Apache License
/** * trans rdb data to hbase with incre condition . * //from ww w.jav a2s .co m * @param connectionurl * rdb url * @param driver * rdb driver class. such as "com.mysql.jdbc.Driver" * @param username * rdb login username * @param password * rdb login password * @param tablename * rdb table. * @param rdbcolumns * rdb table column which selected to write to hbase * @param partitioncolumn * the column which can be used to select data by set a start * value and end value * @param increCheckColumn * incre check column. * @param increLastValue * start column value. * @param linenum * record num * @param hbasetable * output hbase table name * @param hbasecolumns * the columns corresponding to rdb columns * @param rowkeyparam * hbase rowkey generate param. * @param solrmasterurl * such as http://fdp-master:8983/solr/ * @param label * solr label used for search * @param listener * @throws Exception */ public static void transRDBIncre2HBASEWithIndexOnSolr(String connectionurl, String driver, String username, String password, String tablename, String rdbcolumns, String partitioncolumn, String increCheckColumn, String increLastValue, int linenum, String hbasetable, String hbasecolumns, String rowkeyparam, String solrmasterurl, String label, DataProgressListener listener) throws Exception { HashMap<String, String> rdbconfigMap = new HashMap<String, String>(); rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_CONNECTIONSTRING, connectionurl); rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_DRIVER, driver); rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_USERNAME, username); rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_USERPASSWD, password); // configMap.put(RDBInputDataSource.CONFIG_JDBC_SCHEMA, ""); rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_COLUMNS, rdbcolumns); rdbconfigMap.put("fromJobConfig.boundaryQuery", "select min(" + partitioncolumn + "),max(" + partitioncolumn + ") from " + tablename + " where " + increCheckColumn + " >= " + increLastValue); rdbconfigMap.put(CONFIG_MAPNUM, 
String.valueOf(linenum / 1000 + 1)); System.out.println("config_mapnum*********************:" + linenum); if (linenum <= 0) { rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_TABLE, tablename); } else { rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_TABLE, "(select * from " + tablename + " limit " + linenum + " ) as temptable"); } rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_PARTITIONCOLUMN, partitioncolumn); InputDataSource rdb = InputDataSourceFactory.createInputDataSource(Constant.RDBS_DATASOURCE, rdbconfigMap); // HBASE CONFIG HashMap<String, String> hbaseconfigMap = new HashMap<String, String>(); Configuration conf = new Configuration(); conf.addResource(new FileInputStream( DataTransporter.class.getClassLoader().getResource("").getPath() + "hbase-site.xml")); hbaseconfigMap.put(HBaseDataSource.CONFIG_HBASE_ZOOKEEPERLIST, conf.get("hbase.zookeeper.quorum")); hbaseconfigMap.put(HBaseDataSource.CONFIG_HBASE_ZKNODE, conf.get("zookeeper.znode.parent")); hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_TABLENAME, hbasetable); hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_COLUMNSMAP, hbasecolumns); hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_ROWKEYGENEREATEDWAY, HBaseOutputDataSource.ROWKEY_GENERATED_BY_NORMAL); hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_ROWKEYPARAMS, rowkeyparam); OutputDataSource hbase = OutputDataSourceFactory.createOutputDataSource(Constant.HBASE_DATASOURCE, hbaseconfigMap); // solr meta store HttpSolrClient metaclient = new HttpSolrClient(solrmasterurl + "core_for_Meta"); List<SolrInputDocument> sidocs = new ArrayList<SolrInputDocument>(); SolrInputDocument sidoc = new SolrInputDocument(); sidoc.addField("rdbtablename", tablename); sidoc.addField("rdbtablecols", generateDynamicSolrColumns(rdbcolumns)); sidocs.add(sidoc); metaclient.add(sidocs); metaclient.commit(); metaclient.close(); // solr config HashMap<String, String> solrconfig = new HashMap<String, String>(); 
solrconfig.put(SolrDataSource.CONFIG_SOLR_MASTERURL, solrmasterurl); solrconfig.put(SolrOutputDataSource.CONFIG_SOLR_COLUMNS, "label=" + label + ",linecount=" + linenum + ",rdbname=" + connectionurl.substring(connectionurl.lastIndexOf('/')) + ",rdbtablename=" + tablename + ",createdTime=" + System.currentTimeMillis() + ",sourceType=2@di_v@" + generateDynamicSolrColumns(rdbcolumns)); solrconfig.put(SolrOutputDataSource.CONFIG_SOLR_INSTANCE, "core_for_searchDB"); OutputDataSource solr = new SolrOutputDataSource(solrconfig, Constant.SOLR_DATASOURCE_NAME); DataTransporter.createTransJob(rdb, hbase, listener); DataTransporter.createTransJob(rdb, solr, null); }
From source file:com.ngdata.hbaseindexer.mr.GoLive.java
License:Apache License
public boolean goLive(HBaseIndexingOptions options, FileStatus[] outDirs) { LOG.info("Live merging of output shards into Solr cluster..."); boolean success = false; long start = System.currentTimeMillis(); int concurrentMerges = options.goLiveThreads; ThreadPoolExecutor executor = new ThreadPoolExecutor(concurrentMerges, concurrentMerges, 1, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>()); try {/*w ww . j a v a2 s . c om*/ CompletionService<Request> completionService = new ExecutorCompletionService<Request>(executor); Set<Future<Request>> pending = new HashSet<Future<Request>>(); int cnt = -1; for (final FileStatus dir : outDirs) { LOG.debug("processing: " + dir.getPath()); cnt++; List<String> urls = options.shardUrls.get(cnt); for (String url : urls) { String baseUrl = url; if (baseUrl.endsWith("/")) { baseUrl = baseUrl.substring(0, baseUrl.length() - 1); } int lastPathIndex = baseUrl.lastIndexOf("/"); if (lastPathIndex == -1) { LOG.error("Found unexpected shardurl, live merge failed: " + baseUrl); return false; } final String name = baseUrl.substring(lastPathIndex + 1); baseUrl = baseUrl.substring(0, lastPathIndex); final String mergeUrl = baseUrl; Callable<Request> task = new Callable<Request>() { @Override public Request call() { Request req = new Request(); LOG.info("Live merge " + dir.getPath() + " into " + mergeUrl); final HttpSolrClient server = new HttpSolrClient.Builder(mergeUrl).build(); try { CoreAdminRequest.MergeIndexes mergeRequest = new CoreAdminRequest.MergeIndexes(); mergeRequest.setCoreName(name); mergeRequest.setIndexDirs(Arrays.asList(dir.getPath().toString() + "/data/index")); try { mergeRequest.process(server); req.success = true; } catch (SolrServerException e) { req.e = e; return req; } catch (IOException e) { req.e = e; return req; } } finally { try { server.close(); } catch (java.io.IOException e) { throw new RuntimeException(e); } } return req; } }; pending.add(completionService.submit(task)); } } while (pending != null && 
pending.size() > 0) { try { Future<Request> future = completionService.take(); if (future == null) break; pending.remove(future); try { Request req = future.get(); if (!req.success) { // failed LOG.error("A live merge command failed", req.e); return false; } } catch (ExecutionException e) { LOG.error("Error sending live merge command", e); return false; } } catch (InterruptedException e) { Thread.currentThread().interrupt(); LOG.error("Live merge process interrupted", e); return false; } } cnt = -1; try { LOG.info("Committing live merge..."); if (options.zkHost != null) { CloudSolrClient server = new CloudSolrClient.Builder().withZkHost(options.zkHost).build(); server.setDefaultCollection(options.collection); server.commit(); server.close(); } else { for (List<String> urls : options.shardUrls) { for (String url : urls) { // TODO: we should do these concurrently HttpSolrClient server = new HttpSolrClient.Builder(url).build(); server.commit(); server.close(); } } } LOG.info("Done committing live merge"); } catch (Exception e) { LOG.error("Error sending commits to live Solr cluster", e); return false; } success = true; return true; } finally { shutdownNowAndAwaitTermination(executor); float secs = (System.currentTimeMillis() - start) / 1000.0f; LOG.info("Live merging of index shards into Solr cluster took " + secs + " secs"); if (success) { LOG.info("Live merging completed successfully"); } else { LOG.info("Live merging failed"); } } // if an output dir does not exist, we should fail and do no merge? }
From source file:org.alfresco.dataload.Loader.java
License:Open Source License
public static void main(String args[]) throws Exception { int num = Integer.parseInt(args[0]); int start = Integer.parseInt(args[1]); //String url = "http://localhost:8985/solr/joel"; String url = "http://localhost:8983/solr/collection1"; HttpSolrClient client = new HttpSolrClient(url); UpdateRequest request = new UpdateRequest(); int i = start; LocalDateTime localDate = LocalDateTime.now(); Random rand = new Random(); for (i = start; i < num + start; i++) { String s = rand.nextInt(1000) + "helloworld123"; SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", i); doc.addField("test_s", s); doc.addField("test_t", "hello world we love you"); int year = rand.nextInt(50); int month = rand.nextInt(12); int day = rand.nextInt(30); float f = rand.nextFloat(); doc.addField("year_i", Integer.toString(year)); doc.addField("month_i", Integer.toString(month)); doc.addField("day_i", Integer.toString(day)); doc.addField("price_f", Float.toString(f)); LocalDateTime randomDate = localDate.plusDays(rand.nextInt(1000)); doc.addField("date_dt", DateTimeFormatter.ISO_INSTANT.format(randomDate.toInstant(ZoneOffset.UTC))); doc.addField("epoch_l", randomDate.atZone(ZoneOffset.UTC).toInstant().toEpochMilli()); request.add(doc);// w w w . j av a2 s. c o m if (i % 5000 == 0) { request.process(client); client.commit(); request = new UpdateRequest(); } for (int l = 0; l < 5; l++) { String ps = "product" + rand.nextInt(35); doc.addField("prod_ss", ps); } } if ((i % 5000) != 0) { request.process(client); client.commit(); } client.close(); }
From source file:org.apache.ofbiz.solr.SolrProductSearch.java
License:Apache License
/**
 * Adds a single product document to the Solr index.
 * <p>
 * Builds one {@code SolrInputDocument} from the service context, pushes it and
 * commits. Connection failures can be treated as non-fatal (service failure
 * instead of error) via the {@code treatConnectErrorNonFatal} context flag.
 * The returned service result carries an {@code errorType} key on failure:
 * "urlError", "connectError", "solrServerError" or "ioError".
 */
public static Map<String, Object> addToSolrIndex(DispatchContext dctx, Map<String, Object> context)
        throws GenericEntityException {
    HttpSolrClient client = null;
    Locale locale = (Locale) context.get("locale");
    Map<String, Object> result;
    String productId = (String) context.get("productId");
    String solrIndexName = (String) context.get("indexName");
    // connectErrorNonFatal is a necessary option because in some cases it may be
    // considered normal that solr server is unavailable; don't want to return
    // error and abort transactions in these cases.
    Boolean treatConnectErrorNonFatal = (Boolean) context.get("treatConnectErrorNonFatal");
    try {
        Debug.logInfo("Solr: Generating and indexing document for productId '" + productId + "'", module);
        client = SolrUtil.getInstance().getHttpSolrClient(solrIndexName);
        // Construct the document from the full service context.
        SolrInputDocument doc1 = SolrUtil.generateSolrDocument(context);
        Collection<SolrInputDocument> docs = new ArrayList<SolrInputDocument>();
        if (Debug.verboseOn()) {
            Debug.logVerbose("Solr: Indexing document: " + doc1.toString(), module);
        }
        docs.add(doc1);
        // Push the document to the server and make it visible immediately.
        client.add(docs);
        client.commit();
        final String statusStr = UtilProperties.getMessage(resource, "SolrDocumentForProductIdAddedToSolrIndex",
                UtilMisc.toMap("productId", context.get("productId")), locale);
        Debug.logInfo("Solr: " + statusStr, module);
        result = ServiceUtil.returnSuccess(statusStr);
    } catch (MalformedURLException e) {
        Debug.logError(e, e.getMessage(), module);
        result = ServiceUtil.returnError(e.toString());
        result.put("errorType", "urlError");
    } catch (SolrServerException e) {
        // Only a wrapped ConnectException qualifies for the non-fatal treatment.
        if (e.getCause() != null && e.getCause() instanceof ConnectException) {
            final String statusStr = UtilProperties.getMessage(resource,
                    "SolrFailureConnectingToSolrServerToCommitProductId",
                    UtilMisc.toMap("productId", context.get("productId")), locale);
            if (Boolean.TRUE.equals(treatConnectErrorNonFatal)) {
                Debug.logWarning(e, "Solr: " + statusStr, module);
                result = ServiceUtil.returnFailure(statusStr);
            } else {
                Debug.logError(e, "Solr: " + statusStr, module);
                result = ServiceUtil.returnError(statusStr);
            }
            result.put("errorType", "connectError");
        } else {
            Debug.logError(e, e.getMessage(), module);
            result = ServiceUtil.returnError(e.toString());
            result.put("errorType", "solrServerError");
        }
    } catch (IOException e) {
        Debug.logError(e, e.getMessage(), module);
        result = ServiceUtil.returnError(e.toString());
        result.put("errorType", "ioError");
    } finally {
        // Best-effort close; a close failure must not mask the service result.
        if (client != null) {
            try {
                client.close();
            } catch (IOException e) {
                // do nothing
            }
        }
    }
    return result;
}
From source file:org.apache.ofbiz.solr.SolrProductSearch.java
License:Apache License
/**
 * Adds a list of products to the Solr index in a single request.
 * <p>
 * This is faster than reflushing the index each time. The documents are built
 * from the {@code fieldList} context entry (a list of field maps), pushed in
 * one batch and committed. The returned service result carries an
 * {@code errorType} key on failure: "urlError", "connectError",
 * "solrServerError" or "ioError".
 */
public static Map<String, Object> addListToSolrIndex(DispatchContext dctx, Map<String, Object> context)
        throws GenericEntityException {
    String solrIndexName = (String) context.get("indexName");
    Locale locale = (Locale) context.get("locale");
    HttpSolrClient client = null;
    Map<String, Object> result;
    Boolean treatConnectErrorNonFatal = (Boolean) context.get("treatConnectErrorNonFatal");
    try {
        Collection<SolrInputDocument> docs = new ArrayList<SolrInputDocument>();
        // Construct one document per field map in fieldList.
        List<Map<String, Object>> fieldList = UtilGenerics
                .<Map<String, Object>>checkList(context.get("fieldList"));
        Debug.logInfo("Solr: Generating and adding " + fieldList.size() + " documents to solr index", module);
        for (Iterator<Map<String, Object>> fieldListIterator = fieldList.iterator(); fieldListIterator
                .hasNext();) {
            SolrInputDocument doc1 = SolrUtil.generateSolrDocument(fieldListIterator.next());
            if (Debug.verboseOn()) {
                Debug.logVerbose("Solr: Indexing document: " + doc1.toString(), module);
            }
            docs.add(doc1);
        }
        // Push the whole batch to the server and commit once.
        client = SolrUtil.getInstance().getHttpSolrClient(solrIndexName);
        client.add(docs);
        client.commit();
        final String statusStr = UtilProperties.getMessage(resource, "SolrAddedDocumentsToSolrIndex",
                UtilMisc.toMap("fieldList", fieldList.size()), locale);
        Debug.logInfo("Solr: " + statusStr, module);
        result = ServiceUtil.returnSuccess(statusStr);
    } catch (MalformedURLException e) {
        Debug.logError(e, e.getMessage(), module);
        result = ServiceUtil.returnError(e.toString());
        result.put("errorType", "urlError");
    } catch (SolrServerException e) {
        // Only a wrapped ConnectException qualifies for the non-fatal treatment.
        if (e.getCause() != null && e.getCause() instanceof ConnectException) {
            // NOTE(review): this message is parameterized with "productId", but this
            // service receives fieldList, not productId -- looks copied from
            // addToSolrIndex; verify the label resource's expected parameters.
            final String statusStr = UtilProperties.getMessage(resource,
                    "SolrFailureConnectingToSolrServerToCommitProductList",
                    UtilMisc.toMap("productId", context.get("productId")), locale);
            if (Boolean.TRUE.equals(treatConnectErrorNonFatal)) {
                Debug.logWarning(e, "Solr: " + statusStr, module);
                result = ServiceUtil.returnFailure(statusStr);
            } else {
                Debug.logError(e, "Solr: " + statusStr, module);
                result = ServiceUtil.returnError(statusStr);
            }
            result.put("errorType", "connectError");
        } else {
            Debug.logError(e, e.getMessage(), module);
            result = ServiceUtil.returnError(e.toString());
            result.put("errorType", "solrServerError");
        }
    } catch (IOException e) {
        Debug.logError(e, e.getMessage(), module);
        result = ServiceUtil.returnError(e.toString());
        result.put("errorType", "ioError");
    } finally {
        // Best-effort close; a close failure must not mask the service result.
        if (client != null) {
            try {
                client.close();
            } catch (IOException e) {
                // do nothing
            }
        }
    }
    return result;
}
From source file:org.apache.ofbiz.solr.SolrProductSearch.java
License:Apache License
/**
 * Runs a query on the Solr Search Engine and returns the results.
 * <p>
 * This function only returns an object of type QueryResponse, so it is probably
 * not a good idea to call it directly from within the groovy files (as a decent
 * example on how to use it, however, use keywordSearch instead).
 * <p>
 * Recognized context keys: query, facet, spellcheck, highlight, viewIndex,
 * viewSize, queryFilter, returnFields, sortBy, sortByReverse, facetQuery,
 * indexName. On success the result map contains the raw response under
 * "queryResult".
 */
public static Map<String, Object> runSolrQuery(DispatchContext dctx, Map<String, Object> context) {
    // get Connection
    HttpSolrClient client = null;
    String solrIndexName = (String) context.get("indexName");
    Map<String, Object> result;
    try {
        client = SolrUtil.getInstance().getHttpSolrClient(solrIndexName);
        // create Query Object
        SolrQuery solrQuery = new SolrQuery();
        solrQuery.setQuery((String) context.get("query"));
        // NOTE(review): unboxing here throws NullPointerException when "facet"
        // (and likewise "spellcheck"/"highlight" below) is absent from the
        // context -- currently swallowed by the broad catch (Exception) below.
        boolean faceted = (Boolean) context.get("facet");
        if (faceted) {
            // Fixed facet fields and listPrice range buckets.
            solrQuery.setFacet(faceted);
            solrQuery.addFacetField("manu");
            solrQuery.addFacetField("cat");
            solrQuery.setFacetMinCount(1);
            solrQuery.setFacetLimit(8);
            solrQuery.addFacetQuery("listPrice:[0 TO 50]");
            solrQuery.addFacetQuery("listPrice:[50 TO 100]");
            solrQuery.addFacetQuery("listPrice:[100 TO 250]");
            solrQuery.addFacetQuery("listPrice:[250 TO 500]");
            solrQuery.addFacetQuery("listPrice:[500 TO 1000]");
            solrQuery.addFacetQuery("listPrice:[1000 TO 2500]");
            solrQuery.addFacetQuery("listPrice:[2500 TO 5000]");
            solrQuery.addFacetQuery("listPrice:[5000 TO 10000]");
            solrQuery.addFacetQuery("listPrice:[10000 TO 50000]");
            solrQuery.addFacetQuery("listPrice:[50000 TO *]");
        }
        boolean spellCheck = (Boolean) context.get("spellcheck");
        if (spellCheck) {
            solrQuery.setParam("spellcheck", spellCheck);
        }
        boolean highLight = (Boolean) context.get("highlight");
        if (highLight) {
            solrQuery.setHighlight(highLight);
            solrQuery.setHighlightSimplePre("<span class=\"highlight\">");
            solrQuery.addHighlightField("description");
            solrQuery.setHighlightSimplePost("</span>");
            solrQuery.setHighlightSnippets(2);
        }
        // Paging: viewIndex is passed straight to setStart (an offset), viewSize
        // caps the row count.
        if (context.get("viewIndex") != null && (Integer) context.get("viewIndex") > 0) {
            solrQuery.setStart((Integer) context.get("viewIndex"));
        }
        if (context.get("viewSize") != null && (Integer) context.get("viewSize") > 0) {
            solrQuery.setRows((Integer) context.get("viewSize"));
        }
        // Space-separated filter queries.
        String queryFilter = (String) context.get("queryFilter");
        if (UtilValidate.isNotEmpty(queryFilter))
            solrQuery.setFilterQueries(queryFilter.split(" "));
        if ((String) context.get("returnFields") != null) {
            solrQuery.setFields((String) context.get("returnFields"));
        }
        // Sorting: a leading "-" in sortBy is stripped; direction comes from the
        // sortByReverse flag (unboxed -- NPE if absent while sortBy is set).
        if ((String) context.get("sortBy") != null && ((String) context.get("sortBy")).length() > 0) {
            SolrQuery.ORDER order;
            if (!((Boolean) context.get("sortByReverse")))
                order = SolrQuery.ORDER.asc;
            else
                order = SolrQuery.ORDER.desc;
            solrQuery.setSort(((String) context.get("sortBy")).replaceFirst("-", ""), order);
        }
        if ((String) context.get("facetQuery") != null) {
            solrQuery.addFacetQuery((String) context.get("facetQuery"));
        }
        QueryResponse rsp = client.query(solrQuery);
        result = ServiceUtil.returnSuccess();
        result.put("queryResult", rsp);
    } catch (Exception e) {
        Debug.logError(e, e.getMessage(), module);
        result = ServiceUtil.returnError(e.toString());
    } finally {
        // Best-effort close; a close failure must not mask the service result.
        if (client != null) {
            try {
                client.close();
            } catch (IOException e) {
                // do nothing
            }
        }
    }
    return result;
}
From source file:org.apache.ofbiz.solr.SolrProductSearch.java
License:Apache License
/**
 * Rebuilds the solr index from scratch.
 * <p>
 * Fetches all Product entities, wipes the index with a delete-all query, then
 * delegates the bulk re-add to the addListToSolrIndex service. Connection
 * failures can be downgraded to a service failure via the
 * {@code treatConnectErrorNonFatal} context flag.
 */
public static Map<String, Object> rebuildSolrIndex(DispatchContext dctx, Map<String, Object> context)
        throws GenericEntityException {
    HttpSolrClient client = null;
    Map<String, Object> result;
    GenericDelegator delegator = (GenericDelegator) dctx.getDelegator();
    LocalDispatcher dispatcher = dctx.getDispatcher();
    GenericValue userLogin = (GenericValue) context.get("userLogin");
    Locale locale = (Locale) context.get("locale");
    String solrIndexName = (String) context.get("indexName");
    Boolean treatConnectErrorNonFatal = (Boolean) context.get("treatConnectErrorNonFatal");
    try {
        client = SolrUtil.getInstance().getHttpSolrClient(solrIndexName);
        // now lets fetch all products
        List<Map<String, Object>> solrDocs = new ArrayList<Map<String, Object>>();
        List<GenericValue> products = delegator.findList("Product", null, null, null, null, true);
        int numDocs = 0;
        if (products != null) {
            numDocs = products.size();
        }
        Debug.logInfo("Solr: Clearing solr index and rebuilding with " + numDocs + " found products", module);
        // NOTE(review): products is null-checked above but iterated unguarded
        // here -- a null findList result would NPE (caught by the broad catch
        // below); verify findList's null contract.
        Iterator<GenericValue> productIterator = products.iterator();
        while (productIterator.hasNext()) {
            GenericValue product = productIterator.next();
            Map<String, Object> dispatchContext = ProductUtil.getProductContent(product, dctx, context);
            solrDocs.add(dispatchContext);
        }
        // this removes everything from the index
        client.deleteByQuery("*:*");
        client.commit();
        // This adds all products to the index in one batch via the
        // addListToSolrIndex service.
        Map<String, Object> runResult = dispatcher.runSync("addListToSolrIndex",
                UtilMisc.toMap("fieldList", solrDocs, "userLogin", userLogin, "locale", locale, "indexName",
                        solrIndexName, "treatConnectErrorNonFatal", treatConnectErrorNonFatal));
        String runMsg = ServiceUtil.getErrorMessage(runResult);
        if (UtilValidate.isEmpty(runMsg)) {
            runMsg = null;
        }
        // Propagate the nested service's error/failure/success status.
        if (ServiceUtil.isError(runResult)) {
            result = ServiceUtil.returnError(runMsg);
        } else if (ServiceUtil.isFailure(runResult)) {
            result = ServiceUtil.returnFailure(runMsg);
        } else {
            final String statusMsg = UtilProperties.getMessage(resource,
                    "SolrClearedSolrIndexAndReindexedDocuments", UtilMisc.toMap("numDocs", numDocs), locale);
            result = ServiceUtil.returnSuccess(statusMsg);
        }
    } catch (MalformedURLException e) {
        Debug.logError(e, e.getMessage(), module);
        result = ServiceUtil.returnError(e.toString());
    } catch (SolrServerException e) {
        // Only a wrapped ConnectException qualifies for the non-fatal treatment.
        if (e.getCause() != null && e.getCause() instanceof ConnectException) {
            final String statusStr = UtilProperties.getMessage(resource,
                    "SolrFailureConnectingToSolrServerToRebuildIndex", locale);
            if (Boolean.TRUE.equals(treatConnectErrorNonFatal)) {
                Debug.logWarning(e, "Solr: " + statusStr, module);
                result = ServiceUtil.returnFailure(statusStr);
            } else {
                Debug.logError(e, "Solr: " + statusStr, module);
                result = ServiceUtil.returnError(statusStr);
            }
        } else {
            Debug.logError(e, e.getMessage(), module);
            result = ServiceUtil.returnError(e.toString());
        }
    } catch (IOException e) {
        Debug.logError(e, e.getMessage(), module);
        result = ServiceUtil.returnError(e.toString());
    } catch (ServiceAuthException e) {
        Debug.logError(e, e.getMessage(), module);
        result = ServiceUtil.returnError(e.toString());
    } catch (ServiceValidationException e) {
        Debug.logError(e, e.getMessage(), module);
        result = ServiceUtil.returnError(e.toString());
    } catch (GenericServiceException e) {
        Debug.logError(e, e.getMessage(), module);
        result = ServiceUtil.returnError(e.toString());
    } catch (Exception e) {
        Debug.logError(e, e.getMessage(), module);
        result = ServiceUtil.returnError(e.toString());
    } finally {
        // Best-effort close; a close failure must not mask the service result.
        if (client != null) {
            try {
                client.close();
            } catch (IOException e) {
                // do nothing
            }
        }
    }
    return result;
}
From source file:org.apache.sqoop.connector.solr.SolrLoader.java
License:Apache License
@Override public void load(LoaderContext context, LinkConfiguration linkConfiguration, ToJobConfiguration jobConfiguration) throws Exception { Object[] record = null;/* w ww. j a v a 2 s .c o m*/ String[] solrcolumns = jobConfiguration.toJobConfig.columns.split(","); HttpSolrClient client = new HttpSolrClient( linkConfiguration.linkConfig.solrmasterurl + jobConfiguration.toJobConfig.instance); List<SolrInputDocument> sidocs = new ArrayList<SolrInputDocument>(); while ((record = context.getDataReader().readArrayRecord()) != null) { if (solrcolumns.length != record.length) { LOG.warn("SolrLoader load record.length = " + record.length + ", but sorlcolumns.length = " + solrcolumns.length + ". quit record."); continue; } else { SolrInputDocument sidoc = new SolrInputDocument(); for (int i = 0; i < solrcolumns.length; i++) { sidoc.addField(solrcolumns[i], record[i]); } sidocs.add(sidoc); } } LOG.debug("load docs " + sidocs.size()); client.add(sidocs); client.commit(); rowWritten = sidocs.size(); LOG.debug("upload docs successfully"); client.close(); }