List of usage examples for the constructor org.apache.solr.client.solrj.impl.HttpSolrClient#HttpSolrClient
protected HttpSolrClient(Builder builder)
From source file:com.digitalpebble.storm.crawler.solr.SolrConnection.java
License:Apache License
/**
 * Builds a {@code SolrClient} for the given bolt type from the Storm configuration.
 *
 * <p>Reads {@code solr.<boltType>.zkhost}, {@code solr.<boltType>.url} and
 * {@code solr.<boltType>.collection}. When a ZooKeeper host is configured a
 * {@link CloudSolrClient} (SolrCloud) is returned with the collection as its
 * default; otherwise a plain {@link HttpSolrClient} pointing at the URL
 * (default {@code "localhost"}) is returned.
 *
 * @param stormConf Storm configuration map (raw type kept for caller compatibility)
 * @param boltType  bolt name used as the configuration-key infix
 * @return a ready-to-use Solr client; callers are responsible for closing it
 */
public static SolrClient getClient(Map stormConf, String boltType) {
    String zkHost = ConfUtils.getString(stormConf, "solr." + boltType + ".zkhost", null);
    String solrUrl = ConfUtils.getString(stormConf, "solr." + boltType + ".url", "localhost");
    String collection = ConfUtils.getString(stormConf, "solr." + boltType + ".collection", null);

    // Prefer SolrCloud when a ZooKeeper host is configured; fall back to a
    // direct HTTP connection otherwise.
    if (zkHost != null && !zkHost.isEmpty()) {
        CloudSolrClient cloudClient = new CloudSolrClient(zkHost);
        cloudClient.setDefaultCollection(collection);
        return cloudClient;
    }
    return new HttpSolrClient(solrUrl);
}
From source file:com.fline.hadoop.data.client.DataTransporter.java
License:Apache License
/** * trans rdb data to hbase./*from ww w. j a v a 2 s . c om*/ * * @param connectionurl * rdb url * @param driver * rdb driver class. such as "com.mysql.jdbc.Driver" * @param username * rdb login username * @param password * rdb login password * @param tablename * rdb table. * @param rdbcolumns * rdb table column which selected to write to hbase * @param partitioncolumn * the column which can be used to select data by set a start * value and end value * @param linenum * record num * @param hbasetable * output hbase table name * @param hbasecolumns * the columns corresponding to rdb columns * @param rowkeyparam * hbase rowkey generate param. * @param solrmasterurl * such as http://fdp-master:8983/solr/ * @param label * solr label used for search * @param listener * @throws Exception */ public static void transRDB2HBASEWithIndexOnSolr(String connectionurl, String driver, String username, String password, String tablename, String rdbcolumns, String partitioncolumn, int linenum, String hbasetable, String hbasecolumns, String rowkeyparam, String solrmasterurl, String label, DataProgressListener listener) throws Exception { HashMap<String, String> rdbconfigMap = new HashMap<String, String>(); rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_CONNECTIONSTRING, connectionurl); rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_DRIVER, driver); rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_USERNAME, username); rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_USERPASSWD, password); // configMap.put(RDBInputDataSource.CONFIG_JDBC_SCHEMA, ""); rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_COLUMNS, rdbcolumns); // rdbconfigMap.put(CONFIG_MAPNUM, "2000"); // rdbconfigMap.put("fromJobConfig.sql", "1=1 limit " + linenum); rdbconfigMap.put(CONFIG_MAPNUM, String.valueOf(linenum / 1000 + 1)); System.out.println("config_mapnum*********************:" + linenum); if (linenum <= 0) { rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_TABLE, tablename); } else { 
rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_TABLE, "(select * from " + tablename + " limit " + linenum + " ) as temptable"); } rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_PARTITIONCOLUMN, partitioncolumn); InputDataSource rdb = InputDataSourceFactory.createInputDataSource(Constant.RDBS_DATASOURCE, rdbconfigMap); // HBASE CONFIG HashMap<String, String> hbaseconfigMap = new HashMap<String, String>(); Configuration conf = new Configuration(); conf.addResource(new FileInputStream( DataTransporter.class.getClassLoader().getResource("").getPath() + "hbase-site.xml")); hbaseconfigMap.put(HBaseDataSource.CONFIG_HBASE_ZOOKEEPERLIST, conf.get("hbase.zookeeper.quorum")); hbaseconfigMap.put(HBaseDataSource.CONFIG_HBASE_ZKNODE, conf.get("zookeeper.znode.parent")); hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_TABLENAME, hbasetable); hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_COLUMNSMAP, hbasecolumns); hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_ROWKEYGENEREATEDWAY, HBaseOutputDataSource.ROWKEY_GENERATED_BY_NORMAL); hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_ROWKEYPARAMS, rowkeyparam); OutputDataSource hbase = OutputDataSourceFactory.createOutputDataSource(Constant.HBASE_DATASOURCE, hbaseconfigMap); // solr meta store HttpSolrClient metaclient = new HttpSolrClient(solrmasterurl + "core_for_Meta"); List<SolrInputDocument> sidocs = new ArrayList<SolrInputDocument>(); SolrInputDocument sidoc = new SolrInputDocument(); sidoc.addField("rdbtablename", tablename); sidoc.addField("rdbtablecols", generateDynamicSolrColumns(rdbcolumns)); sidocs.add(sidoc); metaclient.add(sidocs); metaclient.commit(); metaclient.close(); // solr config String rdbname = null; if (driver.contains("mysql")) { rdbname = connectionurl.substring(connectionurl.lastIndexOf('/') + 1); } else if (driver.contains("oracle")) { rdbname = connectionurl.substring(connectionurl.lastIndexOf(':') + 1); } HashMap<String, String> solrconfig = new HashMap<String, String>(); 
solrconfig.put(SolrDataSource.CONFIG_SOLR_MASTERURL, solrmasterurl); solrconfig.put(SolrOutputDataSource.CONFIG_SOLR_COLUMNS, "label=" + label + ",linecount=" + linenum + ",rdbname=" + rdbname + ",rdbtablename=" + hbasetable + ",createdTime=" + System.currentTimeMillis() + ",sourceType=2@di_v@" + generateDynamicSolrColumns(rdbcolumns)); System.out.println(SolrOutputDataSource.CONFIG_SOLR_COLUMNS + "\t" + "label=" + label + ",linecount=" + linenum + ",rdbname=" + rdbname + ",rdbtablename=" + tablename + ",createdTime=" + System.currentTimeMillis() + ",sourceType=2@di_v@" + generateDynamicSolrColumns(rdbcolumns)); solrconfig.put(SolrOutputDataSource.CONFIG_SOLR_INSTANCE, "core_for_searchDB"); OutputDataSource solr = new SolrOutputDataSource(solrconfig, Constant.SOLR_DATASOURCE_NAME); DataTransporter.createTransJob(rdb, hbase, null); DataTransporter.createTransJob(rdb, solr, listener); }
From source file:com.fline.hadoop.data.client.DataTransporter.java
License:Apache License
/** * trans rdb data to hbase with incre condition . * //from w w w . j a v a2 s . c om * @param connectionurl * rdb url * @param driver * rdb driver class. such as "com.mysql.jdbc.Driver" * @param username * rdb login username * @param password * rdb login password * @param tablename * rdb table. * @param rdbcolumns * rdb table column which selected to write to hbase * @param partitioncolumn * the column which can be used to select data by set a start * value and end value * @param increCheckColumn * incre check column. * @param increLastValue * start column value. * @param linenum * record num * @param hbasetable * output hbase table name * @param hbasecolumns * the columns corresponding to rdb columns * @param rowkeyparam * hbase rowkey generate param. * @param solrmasterurl * such as http://fdp-master:8983/solr/ * @param label * solr label used for search * @param listener * @throws Exception */ public static void transRDBIncre2HBASEWithIndexOnSolr(String connectionurl, String driver, String username, String password, String tablename, String rdbcolumns, String partitioncolumn, String increCheckColumn, String increLastValue, int linenum, String hbasetable, String hbasecolumns, String rowkeyparam, String solrmasterurl, String label, DataProgressListener listener) throws Exception { HashMap<String, String> rdbconfigMap = new HashMap<String, String>(); rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_CONNECTIONSTRING, connectionurl); rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_DRIVER, driver); rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_USERNAME, username); rdbconfigMap.put(RDBDataSource.CONFIG_JDBC_USERPASSWD, password); // configMap.put(RDBInputDataSource.CONFIG_JDBC_SCHEMA, ""); rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_COLUMNS, rdbcolumns); rdbconfigMap.put("fromJobConfig.boundaryQuery", "select min(" + partitioncolumn + "),max(" + partitioncolumn + ") from " + tablename + " where " + increCheckColumn + " >= " + increLastValue); rdbconfigMap.put(CONFIG_MAPNUM, 
String.valueOf(linenum / 1000 + 1)); System.out.println("config_mapnum*********************:" + linenum); if (linenum <= 0) { rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_TABLE, tablename); } else { rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_TABLE, "(select * from " + tablename + " limit " + linenum + " ) as temptable"); } rdbconfigMap.put(RDBInputDataSource.CONFIG_JDBC_PARTITIONCOLUMN, partitioncolumn); InputDataSource rdb = InputDataSourceFactory.createInputDataSource(Constant.RDBS_DATASOURCE, rdbconfigMap); // HBASE CONFIG HashMap<String, String> hbaseconfigMap = new HashMap<String, String>(); Configuration conf = new Configuration(); conf.addResource(new FileInputStream( DataTransporter.class.getClassLoader().getResource("").getPath() + "hbase-site.xml")); hbaseconfigMap.put(HBaseDataSource.CONFIG_HBASE_ZOOKEEPERLIST, conf.get("hbase.zookeeper.quorum")); hbaseconfigMap.put(HBaseDataSource.CONFIG_HBASE_ZKNODE, conf.get("zookeeper.znode.parent")); hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_TABLENAME, hbasetable); hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_COLUMNSMAP, hbasecolumns); hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_ROWKEYGENEREATEDWAY, HBaseOutputDataSource.ROWKEY_GENERATED_BY_NORMAL); hbaseconfigMap.put(HBaseOutputDataSource.CONFIG_HBASE_ROWKEYPARAMS, rowkeyparam); OutputDataSource hbase = OutputDataSourceFactory.createOutputDataSource(Constant.HBASE_DATASOURCE, hbaseconfigMap); // solr meta store HttpSolrClient metaclient = new HttpSolrClient(solrmasterurl + "core_for_Meta"); List<SolrInputDocument> sidocs = new ArrayList<SolrInputDocument>(); SolrInputDocument sidoc = new SolrInputDocument(); sidoc.addField("rdbtablename", tablename); sidoc.addField("rdbtablecols", generateDynamicSolrColumns(rdbcolumns)); sidocs.add(sidoc); metaclient.add(sidocs); metaclient.commit(); metaclient.close(); // solr config HashMap<String, String> solrconfig = new HashMap<String, String>(); 
solrconfig.put(SolrDataSource.CONFIG_SOLR_MASTERURL, solrmasterurl); solrconfig.put(SolrOutputDataSource.CONFIG_SOLR_COLUMNS, "label=" + label + ",linecount=" + linenum + ",rdbname=" + connectionurl.substring(connectionurl.lastIndexOf('/')) + ",rdbtablename=" + tablename + ",createdTime=" + System.currentTimeMillis() + ",sourceType=2@di_v@" + generateDynamicSolrColumns(rdbcolumns)); solrconfig.put(SolrOutputDataSource.CONFIG_SOLR_INSTANCE, "core_for_searchDB"); OutputDataSource solr = new SolrOutputDataSource(solrconfig, Constant.SOLR_DATASOURCE_NAME); DataTransporter.createTransJob(rdb, hbase, listener); DataTransporter.createTransJob(rdb, solr, null); }
From source file:com.frank.search.solr.server.support.HttpSolrClientFactoryBean.java
License:Apache License
/**
 * Instantiates an {@link HttpSolrClient} for the configured URL, applies the
 * optional connection tuning (timeout, max connections) and registers it via
 * {@code setSolrClient}.
 */
private void createHttpSolrClient() {
    HttpSolrClient client = new HttpSolrClient(this.url);
    // Both settings are optional; only apply them when explicitly configured.
    if (timeout != null) {
        client.setConnectionTimeout(timeout.intValue());
    }
    if (maxConnections != null) {
        client.setMaxTotalConnections(maxConnections);
    }
    setSolrClient(client);
}
From source file:com.gdn.x.ui.controller.CommonController.java
protected List searchByQuery(String queryParameter) throws MalformedURLException, SolrServerException { List actualResult = new ArrayList<String>(); try {// w w w. ja va 2s. c o m // for solr 5 HttpSolrClient solr = new HttpSolrClient(solrUrlSearch); //for Solr 4 // HttpSolrServer solr = new HttpSolrServer(""); SolrQuery query = new SolrQuery(); query.setQuery(queryParameter); query.setFields("id"); query.setRequestHandler("/browse"); query.setStart(0); query.setRows(40); QueryResponse response = solr.query(query); SolrDocumentList results = response.getResults(); int j = 0; for (int i = 0; i < results.size(); ++i, j++) { actualResult.add(results.get(i).get("id")); } } catch (IOException ex) { Logger.getLogger(RunGoldenListController.class.getName()).log(Level.SEVERE, null, ex); } return actualResult; }
From source file:com.gdn.x.ui.controller.Evaluation.CommonControllerEvaluation.java
protected List searchByQueryEvaluation(String queryParameter) throws MalformedURLException, SolrServerException { List actualResult = new ArrayList<String>(); try {/* w w w . j a v a 2 s.c o m*/ // for solr 5 HttpSolrClient solr = new HttpSolrClient(solrUrlSearch); //for Solr 4 // HttpSolrServer solr = new HttpSolrServer("http://172.17.132.9:8983/solr/collection3"); SolrQuery query = new SolrQuery(); query.setQuery(queryParameter); query.setFields("id"); query.setRequestHandler("/browse"); query.setStart(0); query.setRows(24); QueryResponse response = solr.query(query); SolrDocumentList results = response.getResults(); int j = 0; for (int i = 0; i < results.size(); ++i, j++) { actualResult.add(results.get(i).get("id")); } // System.out.println(results.getNumFound()); // System.out.println(actualResult); } catch (IOException ex) { Logger.getLogger(RunGoldenListController.class.getName()).log(Level.SEVERE, null, ex); } return actualResult; }
From source file:com.github.fengtan.sophie.dialogs.ConnectDialog.java
License:Open Source License
@Override protected void buttonPressed(int buttonId) { if (buttonId != IDialogConstants.OK_ID) { super.buttonPressed(buttonId); return;//from w w w. j a v a 2 s.com } // Populate connection label. value = combo.getText(); // Instantiate Solr client based on what the user provided. switch (selectedType) { case DIRECT_HTTP: default: client = new HttpSolrClient(combo.getText()); break; case SOLR_CLOUD: client = new CloudSolrClient(combo.getText()); break; } super.buttonPressed(buttonId); }
From source file:com.glluch.ecf2xmlmaven.Competence.java
License:Open Source License
public void toSolr() throws SolrServerException, IOException { SolrClient solr = new HttpSolrClient(serverUrl); //solr.setDefaultCollection("ecf"); SolrInputDocument doc1 = new SolrInputDocument(); doc1.addField("title", this.code); doc1.addField("content", this.terms.toString()); solr.add(doc1);/* www . j a va 2 s. c om*/ //System.out.println(a); }
From source file:com.hurence.logisland.service.solr.Solr_5_5_5_ClientService.java
License:Apache License
/**
 * Builds an HTTP Solr client bound to a single collection by appending the
 * collection name to the base connection string.
 *
 * @param connectionString base Solr URL (no trailing slash expected)
 * @param collection       collection/core name to target
 * @return a new {@link HttpSolrClient} for {@code connectionString/collection}
 */
protected SolrClient createHttpClient(String connectionString, String collection) {
    String collectionUrl = connectionString + "/" + collection;
    return new HttpSolrClient(collectionUrl);
}
From source file:com.mycompany.sparkrentals.client.RentalSolrClient.java
/**
 * Connects to a Solr server by creating an {@link HttpSolrClient} for the
 * given URL and storing it in the {@code solrClient} field.
 *
 * @param solrUrl base URL of the Solr server/core to connect to
 */
public void connect(String solrUrl) {
    solrClient = new HttpSolrClient(solrUrl);
}