Example usage for org.apache.solr.client.solrj.impl HttpSolrClient add

List of usage examples for org.apache.solr.client.solrj.impl HttpSolrClient add

Introduction

In this page you can find the example usage for org.apache.solr.client.solrj.impl HttpSolrClient add.

Prototype

public UpdateResponse add(Collection<SolrInputDocument> docs) throws SolrServerException, IOException 

Source Link

Document

Adds a collection of documents

Usage

From source file:com.fline.hadoop.data.client.DataTransporter.java

License:Apache License

/**
 * Transfers RDB table data to HBase and registers a search index on Solr.
 *
 * @param connectionurl
 *            rdb url
 * @param driver
 *            rdb driver class, such as "com.mysql.jdbc.Driver"
 * @param username
 *            rdb login username
 * @param password
 *            rdb login password
 * @param tablename
 *            rdb table
 * @param rdbcolumns
 *            rdb table columns selected to write to hbase
 * @param partitioncolumn
 *            the column used to split the data by a start value and an end
 *            value
 * @param linenum
 *            record num; values &lt;= 0 mean "whole table"
 * @param hbasetable
 *            output hbase table name
 * @param hbasecolumns
 *            the hbase columns corresponding to rdb columns
 * @param rowkeyparam
 *            hbase rowkey generate param
 * @param solrmasterurl
 *            such as http://fdp-master:8983/solr/
 * @param label
 *            solr label used for search
 * @param listener
 *            progress callback attached to the solr transfer job
 * @throws Exception
 *             if any configuration, I/O, or Solr operation fails
 */
public static void transRDB2HBASEWithIndexOnSolr(String connectionurl, String driver, String username,
        String password, String tablename, String rdbcolumns, String partitioncolumn, int linenum,
        String hbasetable, String hbasecolumns, String rowkeyparam, String solrmasterurl, String label,
        DataProgressListener listener) throws Exception {
    // --- RDB (source) configuration ---
    HashMap<String, String> rdbconfigMap = new HashMap<String, String>();
    rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_CONNECTIONSTRING, connectionurl);
    rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_DRIVER, driver);
    rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_USERNAME, username);
    rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_USERPASSWD, password);
    rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_COLUMNS, rdbcolumns);
    // One mapper per ~1000 rows, minimum 1.
    rdbconfigMap.put(CONFIG_MAPNUM, String.valueOf(linenum / 1000 + 1));
    System.out.println("config_mapnum*********************:" + linenum);
    if (linenum <= 0) {
        rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_TABLE, tablename);
    } else {
        // NOTE(review): tablename is concatenated into SQL unescaped — callers
        // must not pass untrusted input here. "limit" syntax is MySQL-specific.
        rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_TABLE,
                "(select * from " + tablename + " limit " + linenum + " ) as temptable");
    }
    rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_PARTITIONCOLUMN, partitioncolumn);
    InputDataSource rdb = InputDataSourceFactory.createInputDataSource(Constant.RDBS_DATASOURCE, rdbconfigMap);
    // --- HBase (sink) configuration; zookeeper settings come from hbase-site.xml on the classpath ---
    HashMap<String, String> hbaseconfigMap = new HashMap<String, String>();
    Configuration conf = new Configuration();
    conf.addResource(new FileInputStream(
            DataTransporter.class.getClassLoader().getResource("").getPath() + "hbase-site.xml"));
    hbaseconfigMap.put(HBaseDataSource.CONFIG_HBASE_ZOOKEEPERLIST, conf.get("hbase.zookeeper.quorum"));
    hbaseconfigMap.put(HBaseDataSource.CONFIG_HBASE_ZKNODE, conf.get("zookeeper.znode.parent"));
    hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_TABLENAME, hbasetable);
    hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_COLUMNSMAP, hbasecolumns);
    hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_ROWKEYGENEREATEDWAY,
            HBaseOutputDataSource.ROWKEY_GENERATED_BY_NORMAL);
    hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_ROWKEYPARAMS, rowkeyparam);
    OutputDataSource hbase = OutputDataSourceFactory.createOutputDataSource(Constant.HBASE_DATASOURCE,
            hbaseconfigMap);
    // --- Solr meta store: record table name + columns so searches can resolve them later.
    // try-with-resources guarantees the client is closed even if add/commit throws
    // (the original leaked the connection on failure).
    try (HttpSolrClient metaclient = new HttpSolrClient(solrmasterurl + "core_for_Meta")) {
        List<SolrInputDocument> sidocs = new ArrayList<SolrInputDocument>();
        SolrInputDocument sidoc = new SolrInputDocument();
        sidoc.addField("rdbtablename", tablename);
        sidoc.addField("rdbtablecols", generateDynamicSolrColumns(rdbcolumns));
        sidocs.add(sidoc);
        metaclient.add(sidocs);
        metaclient.commit();
    }
    // --- Solr (index sink) configuration ---
    // Derive a database name from the JDBC url; only mysql/oracle are recognized,
    // anything else leaves rdbname null (rendered as the literal "null" below).
    String rdbname = null;
    if (driver.contains("mysql")) {
        rdbname = connectionurl.substring(connectionurl.lastIndexOf('/') + 1);
    } else if (driver.contains("oracle")) {
        rdbname = connectionurl.substring(connectionurl.lastIndexOf(':') + 1);
    }
    HashMap<String, String> solrconfig = new HashMap<String, String>();
    solrconfig.put(SolrDataSource.CONFIG_SOLR_MASTERURL, solrmasterurl);
    // NOTE(review): the stored rdbtablename is hbasetable here, but the debug
    // println below (and the incremental variant of this method) uses tablename —
    // confirm which one the search side expects.
    solrconfig.put(SolrOutputDataSource.CONFIG_SOLR_COLUMNS,
            "label=" + label + ",linecount=" + linenum + ",rdbname=" + rdbname + ",rdbtablename=" + hbasetable
                    + ",createdTime=" + System.currentTimeMillis() + ",sourceType=2@di_v@"
                    + generateDynamicSolrColumns(rdbcolumns));
    System.out.println(SolrOutputDataSource.CONFIG_SOLR_COLUMNS + "\t" + "label=" + label + ",linecount="
            + linenum + ",rdbname=" + rdbname + ",rdbtablename=" + tablename + ",createdTime="
            + System.currentTimeMillis() + ",sourceType=2@di_v@" + generateDynamicSolrColumns(rdbcolumns));
    solrconfig.put(SolrOutputDataSource.CONFIG_SOLR_INSTANCE, "core_for_searchDB");
    OutputDataSource solr = new SolrOutputDataSource(solrconfig, Constant.SOLR_DATASOURCE_NAME);
    // Two jobs: rdb -> hbase (no listener), rdb -> solr (reports progress).
    DataTransporter.createTransJob(rdb, hbase, null);
    DataTransporter.createTransJob(rdb, solr, listener);
}

From source file:com.fline.hadoop.data.client.DataTransporter.java

License:Apache License

/**
 * Transfers RDB data to HBase with an incremental condition and registers a
 * search index on Solr.
 *
 * @param connectionurl
 *            rdb url
 * @param driver
 *            rdb driver class, such as "com.mysql.jdbc.Driver"
 * @param username
 *            rdb login username
 * @param password
 *            rdb login password
 * @param tablename
 *            rdb table
 * @param rdbcolumns
 *            rdb table columns selected to write to hbase
 * @param partitioncolumn
 *            the column used to split the data by a start value and an end
 *            value
 * @param increCheckColumn
 *            incremental check column
 * @param increLastValue
 *            start column value (lower bound, inclusive)
 * @param linenum
 *            record num; values &lt;= 0 mean "whole table"
 * @param hbasetable
 *            output hbase table name
 * @param hbasecolumns
 *            the hbase columns corresponding to rdb columns
 * @param rowkeyparam
 *            hbase rowkey generate param
 * @param solrmasterurl
 *            such as http://fdp-master:8983/solr/
 * @param label
 *            solr label used for search
 * @param listener
 *            progress callback attached to the hbase transfer job
 * @throws Exception
 *             if any configuration, I/O, or Solr operation fails
 */
public static void transRDBIncre2HBASEWithIndexOnSolr(String connectionurl, String driver, String username,
        String password, String tablename, String rdbcolumns, String partitioncolumn, String increCheckColumn,
        String increLastValue, int linenum, String hbasetable, String hbasecolumns, String rowkeyparam,
        String solrmasterurl, String label, DataProgressListener listener) throws Exception {
    // --- RDB (source) configuration ---
    HashMap<String, String> rdbconfigMap = new HashMap<String, String>();
    rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_CONNECTIONSTRING, connectionurl);
    rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_DRIVER, driver);
    rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_USERNAME, username);
    rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_USERPASSWD, password);
    rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_COLUMNS, rdbcolumns);
    // NOTE(review): increLastValue is concatenated unquoted, so this only works
    // for numeric check columns; it is also unescaped SQL — never pass untrusted
    // input here.
    rdbconfigMap.put("fromJobConfig.boundaryQuery", "select min(" + partitioncolumn + "),max(" + partitioncolumn
            + ") from " + tablename + " where " + increCheckColumn + " >= " + increLastValue);
    // One mapper per ~1000 rows, minimum 1.
    rdbconfigMap.put(CONFIG_MAPNUM, String.valueOf(linenum / 1000 + 1));
    System.out.println("config_mapnum*********************:" + linenum);
    if (linenum <= 0) {
        rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_TABLE, tablename);
    } else {
        rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_TABLE,
                "(select * from " + tablename + " limit " + linenum + " ) as temptable");
    }
    rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_PARTITIONCOLUMN, partitioncolumn);
    InputDataSource rdb = InputDataSourceFactory.createInputDataSource(Constant.RDBS_DATASOURCE, rdbconfigMap);
    // --- HBase (sink) configuration; zookeeper settings come from hbase-site.xml on the classpath ---
    HashMap<String, String> hbaseconfigMap = new HashMap<String, String>();
    Configuration conf = new Configuration();
    conf.addResource(new FileInputStream(
            DataTransporter.class.getClassLoader().getResource("").getPath() + "hbase-site.xml"));
    hbaseconfigMap.put(HBaseDataSource.CONFIG_HBASE_ZOOKEEPERLIST, conf.get("hbase.zookeeper.quorum"));
    hbaseconfigMap.put(HBaseDataSource.CONFIG_HBASE_ZKNODE, conf.get("zookeeper.znode.parent"));
    hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_TABLENAME, hbasetable);
    hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_COLUMNSMAP, hbasecolumns);
    hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_ROWKEYGENEREATEDWAY,
            HBaseOutputDataSource.ROWKEY_GENERATED_BY_NORMAL);
    hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_ROWKEYPARAMS, rowkeyparam);
    OutputDataSource hbase = OutputDataSourceFactory.createOutputDataSource(Constant.HBASE_DATASOURCE,
            hbaseconfigMap);
    // --- Solr meta store: record table name + columns so searches can resolve them later.
    // try-with-resources guarantees the client is closed even if add/commit throws
    // (the original leaked the connection on failure).
    try (HttpSolrClient metaclient = new HttpSolrClient(solrmasterurl + "core_for_Meta")) {
        List<SolrInputDocument> sidocs = new ArrayList<SolrInputDocument>();
        SolrInputDocument sidoc = new SolrInputDocument();
        sidoc.addField("rdbtablename", tablename);
        sidoc.addField("rdbtablecols", generateDynamicSolrColumns(rdbcolumns));
        sidocs.add(sidoc);
        metaclient.add(sidocs);
        metaclient.commit();
    }
    // --- Solr (index sink) configuration ---
    HashMap<String, String> solrconfig = new HashMap<String, String>();
    solrconfig.put(SolrDataSource.CONFIG_SOLR_MASTERURL, solrmasterurl);
    solrconfig.put(SolrOutputDataSource.CONFIG_SOLR_COLUMNS,
            "label=" + label + ",linecount=" + linenum + ",rdbname="
                    + connectionurl.substring(connectionurl.lastIndexOf('/')) + ",rdbtablename=" + tablename
                    + ",createdTime=" + System.currentTimeMillis() + ",sourceType=2@di_v@"
                    + generateDynamicSolrColumns(rdbcolumns));
    solrconfig.put(SolrOutputDataSource.CONFIG_SOLR_INSTANCE, "core_for_searchDB");
    OutputDataSource solr = new SolrOutputDataSource(solrconfig, Constant.SOLR_DATASOURCE_NAME);
    // Two jobs: rdb -> hbase (reports progress), rdb -> solr (no listener) —
    // the inverse of the non-incremental variant.
    DataTransporter.createTransJob(rdb, hbase, listener);
    DataTransporter.createTransJob(rdb, solr, null);
}

From source file:it.damore.solr.importexport.App.java

License:Open Source License

/**
 * Streams JSON documents from a file into Solr in fixed-size batches.
 * Honors the dry-run flag (no writes) and the delete-all flag (wipe the
 * index first); commits once at the end.
 *
 * @param client     Solr client to write to
 * @param outputFile file containing one JSON document per line
 * @throws FileNotFoundException if the file does not exist
 * @throws IOException           on read or Solr transport failure
 * @throws SolrServerException   on Solr-side failure
 */
private static void writeAllDocuments(HttpSolrClient client, File outputFile)
        throws FileNotFoundException, IOException, SolrServerException {
    boolean dryRun = config.getDryRun();
    if (!dryRun && config.getDeleteAll()) {
        logger.info("delete all!");
        client.deleteByQuery("*:*");
    }
    logger.info("Reading " + config.getFileName());

    try (BufferedReader reader = new BufferedReader(new FileReader(outputFile))) {
        reader.lines().collect(StreamUtils.batchCollector(config.getBlockSize(), batch -> {
            List<SolrInputDocument> docs = batch.stream()
                    .map(App::json2SolrInputDocument)
                    .collect(Collectors.toList());
            if (config.getDryRun()) {
                return; // dry run: parse only, never send
            }
            try {
                logger.info("adding " + docs.size() + " documents (" + incrementCounter(docs.size()) + ")");
                client.add(docs);
            } catch (SolrServerException | IOException e) {
                // Lambdas cannot throw checked exceptions; rewrap for the caller.
                throw new RuntimeException(e);
            }
        }));
    }

    if (!config.getDryRun()) {
        logger.info("Commit");
        client.commit();
    }

}

From source file:org.apache.ofbiz.solr.SolrProductSearch.java

License:Apache License

/**
 * Adds a single product to the Solr index.
 *
 * <p>Builds one Solr document from the service context, pushes it to the
 * index named by {@code indexName}, and commits. Connection failures can be
 * downgraded from error to failure via {@code treatConnectErrorNonFatal}.
 */
public static Map<String, Object> addToSolrIndex(DispatchContext dctx, Map<String, Object> context)
        throws GenericEntityException {
    HttpSolrClient client = null;
    Locale locale = (Locale) context.get("locale");
    Map<String, Object> result;
    String productId = (String) context.get("productId");
    String solrIndexName = (String) context.get("indexName");
    // connectErrorNonFatal is a necessary option because in some cases it may be considered normal that solr server is unavailable;
    // don't want to return error and abort transactions in these cases.
    Boolean treatConnectErrorNonFatal = (Boolean) context.get("treatConnectErrorNonFatal");
    try {
        Debug.logInfo("Solr: Generating and indexing document for productId '" + productId + "'", module);

        client = SolrUtil.getInstance().getHttpSolrClient(solrIndexName);
        //Debug.log(server.ping().toString());

        // Construct Documents
        SolrInputDocument doc1 = SolrUtil.generateSolrDocument(context);
        Collection<SolrInputDocument> docs = new ArrayList<SolrInputDocument>();

        if (Debug.verboseOn()) {
            Debug.logVerbose("Solr: Indexing document: " + doc1.toString(), module);
        }

        docs.add(doc1);

        // push Documents to server
        client.add(docs);
        client.commit();

        final String statusStr = UtilProperties.getMessage(resource, "SolrDocumentForProductIdAddedToSolrIndex",
                UtilMisc.toMap("productId", context.get("productId")), locale);
        Debug.logInfo("Solr: " + statusStr, module);
        result = ServiceUtil.returnSuccess(statusStr);
    } catch (MalformedURLException e) {
        // Bad Solr URL configuration; surface as a hard error.
        Debug.logError(e, e.getMessage(), module);
        result = ServiceUtil.returnError(e.toString());
        result.put("errorType", "urlError");
    } catch (SolrServerException e) {
        if (e.getCause() != null && e.getCause() instanceof ConnectException) {
            // Server unreachable: honor treatConnectErrorNonFatal — warning/failure
            // instead of error so surrounding transactions are not aborted.
            final String statusStr = UtilProperties.getMessage(resource,
                    "SolrFailureConnectingToSolrServerToCommitProductId",
                    UtilMisc.toMap("productId", context.get("productId")), locale);
            if (Boolean.TRUE.equals(treatConnectErrorNonFatal)) {
                Debug.logWarning(e, "Solr: " + statusStr, module);
                result = ServiceUtil.returnFailure(statusStr);
            } else {
                Debug.logError(e, "Solr: " + statusStr, module);
                result = ServiceUtil.returnError(statusStr);
            }
            result.put("errorType", "connectError");
        } else {
            // Any other Solr-side failure is always a hard error.
            Debug.logError(e, e.getMessage(), module);
            result = ServiceUtil.returnError(e.toString());
            result.put("errorType", "solrServerError");
        }
    } catch (IOException e) {
        Debug.logError(e, e.getMessage(), module);
        result = ServiceUtil.returnError(e.toString());
        result.put("errorType", "ioError");
    } finally {
        // Best-effort close; a close failure must not mask the service result.
        if (client != null) {
            try {
                client.close();
            } catch (IOException e) {
                // do nothing
            }
        }
    }
    return result;
}

From source file:org.apache.ofbiz.solr.SolrProductSearch.java

License:Apache License

/**
 * Adds a list of products to the Solr index in one batch.
 * <p>
 * This is faster than reflushing the index each time.
 */
public static Map<String, Object> addListToSolrIndex(DispatchContext dctx, Map<String, Object> context)
        throws GenericEntityException {
    String solrIndexName = (String) context.get("indexName");
    Locale locale = (Locale) context.get("locale");
    HttpSolrClient client = null;
    Map<String, Object> result;
    Boolean treatConnectErrorNonFatal = (Boolean) context.get("treatConnectErrorNonFatal");
    try {
        Collection<SolrInputDocument> batch = new ArrayList<SolrInputDocument>();

        // Build one Solr document per field map in the incoming list.
        List<Map<String, Object>> fieldList = UtilGenerics
                .<Map<String, Object>>checkList(context.get("fieldList"));

        Debug.logInfo("Solr: Generating and adding " + fieldList.size() + " documents to solr index", module);

        for (Map<String, Object> fields : fieldList) {
            SolrInputDocument document = SolrUtil.generateSolrDocument(fields);
            if (Debug.verboseOn()) {
                Debug.logVerbose("Solr: Indexing document: " + document.toString(), module);
            }
            batch.add(document);
        }
        // Push the whole batch in one request, then commit.
        client = SolrUtil.getInstance().getHttpSolrClient(solrIndexName);
        client.add(batch);
        client.commit();

        final String statusStr = UtilProperties.getMessage(resource, "SolrAddedDocumentsToSolrIndex",
                UtilMisc.toMap("fieldList", fieldList.size()), locale);
        Debug.logInfo("Solr: " + statusStr, module);
        result = ServiceUtil.returnSuccess(statusStr);
    } catch (MalformedURLException e) {
        // Bad Solr URL configuration; surface as a hard error.
        Debug.logError(e, e.getMessage(), module);
        result = ServiceUtil.returnError(e.toString());
        result.put("errorType", "urlError");
    } catch (SolrServerException e) {
        if (e.getCause() != null && e.getCause() instanceof ConnectException) {
            // Server unreachable: honor treatConnectErrorNonFatal — warning/failure
            // instead of error so surrounding transactions are not aborted.
            final String statusStr = UtilProperties.getMessage(resource,
                    "SolrFailureConnectingToSolrServerToCommitProductList",
                    UtilMisc.toMap("productId", context.get("productId")), locale);
            if (Boolean.TRUE.equals(treatConnectErrorNonFatal)) {
                Debug.logWarning(e, "Solr: " + statusStr, module);
                result = ServiceUtil.returnFailure(statusStr);
            } else {
                Debug.logError(e, "Solr: " + statusStr, module);
                result = ServiceUtil.returnError(statusStr);
            }
            result.put("errorType", "connectError");
        } else {
            // Any other Solr-side failure is always a hard error.
            Debug.logError(e, e.getMessage(), module);
            result = ServiceUtil.returnError(e.toString());
            result.put("errorType", "solrServerError");
        }
    } catch (IOException e) {
        Debug.logError(e, e.getMessage(), module);
        result = ServiceUtil.returnError(e.toString());
        result.put("errorType", "ioError");
    } finally {
        // Best-effort close; a close failure must not mask the service result.
        if (client != null) {
            try {
                client.close();
            } catch (IOException e) {
                // do nothing
            }
        }
    }
    return result;
}

From source file:org.apache.sqoop.connector.solr.SolrLoader.java

License:Apache License

@Override
public void load(LoaderContext context, LinkConfiguration linkConfiguration,
        ToJobConfiguration jobConfiguration) throws Exception {
    // Loads every record from the Sqoop data reader into Solr: one document
    // per record, fields mapped positionally onto the configured column names.
    String[] solrcolumns = jobConfiguration.toJobConfig.columns.split(",");
    HttpSolrClient client = new HttpSolrClient(
            linkConfiguration.linkConfig.solrmasterurl + jobConfiguration.toJobConfig.instance);
    // try/finally so the client is closed even when reading or indexing throws
    // (the original leaked the connection on any failure).
    try {
        List<SolrInputDocument> sidocs = new ArrayList<SolrInputDocument>();
        Object[] record;
        while ((record = context.getDataReader().readArrayRecord()) != null) {
            // Skip records whose arity does not match the column mapping.
            if (solrcolumns.length != record.length) {
                LOG.warn("SolrLoader load record.length = " + record.length + ", but sorlcolumns.length = "
                        + solrcolumns.length + ". quit record.");
                continue;
            }
            SolrInputDocument sidoc = new SolrInputDocument();
            for (int i = 0; i < solrcolumns.length; i++) {
                sidoc.addField(solrcolumns[i], record[i]);
            }
            sidocs.add(sidoc);
        }
        LOG.debug("load docs " + sidocs.size());
        // Single batched add + commit; buffers everything in memory first —
        // NOTE(review): for very large splits consider chunked adds.
        client.add(sidocs);
        client.commit();
        rowWritten = sidocs.size();
        LOG.debug("upload docs successfully");
    } finally {
        client.close();
    }
}