List of usage examples for org.apache.solr.core SolrResourceLoader getConfigDir
public String getConfigDir()
From source file:alba.components.FilteredShowFileRequestHandler.java
License:Apache License
public static File getAdminFileFromFileSystem(SolrQueryRequest req, SolrQueryResponse rsp, Set<String> hiddenFiles) { File adminFile = null;//from w w w . j av a 2 s. c o m final SolrResourceLoader loader = req.getCore().getResourceLoader(); File configdir = new File(loader.getConfigDir()); if (!configdir.exists()) { // TODO: maybe we should just open it this way to start with? try { configdir = new File(loader.getClassLoader().getResource(loader.getConfigDir()).toURI()); } catch (URISyntaxException e) { log.error("Can not access configuration directory!"); rsp.setException(new SolrException(SolrException.ErrorCode.FORBIDDEN, "Can not access configuration directory!", e)); return null; } } String fname = req.getParams().get("file", null); if (fname == null) { adminFile = configdir; } else { fname = fname.replace('\\', '/'); // normalize slashes if (hiddenFiles.contains(fname.toUpperCase(Locale.ROOT))) { log.error("Can not access: " + fname); rsp.setException(new SolrException(SolrException.ErrorCode.FORBIDDEN, "Can not access: " + fname)); return null; } if (fname.indexOf("..") >= 0) { log.error("Invalid path: " + fname); rsp.setException(new SolrException(SolrException.ErrorCode.FORBIDDEN, "Invalid path: " + fname)); return null; } adminFile = new File(configdir, fname); } return adminFile; }
From source file:at.ac.univie.mminf.luceneSKOS.solr.SKOSFilterFactory.java
License:Apache License
/**
 * Configures the SKOS engine and expansion settings from the factory args.
 *
 * <p>Required args: {@code skosFile} (an .n3/.rdf/.ttl/.zip file, resolved
 * relative to the core's conf dir) and {@code expansionType} (URI or LABEL).
 * Optional args: {@code bufferSize} (positive int), {@code language}
 * (space-separated language codes) and {@code type} (space-separated
 * {@code SKOSType} names).
 *
 * @param loader expected to be a {@link SolrResourceLoader}; used to locate
 *               the SKOS file in the core's config directory
 * @throws IllegalArgumentException if mandatory args are missing, the
 *         expansion type is unknown, bufferSize is non-positive, or a
 *         {@code type} token is not a valid {@code SKOSType} name
 * @throws RuntimeException if the SKOS engine cannot be instantiated
 */
@Override
public void inform(ResourceLoader loader) {
    SolrResourceLoader solrLoader = (SolrResourceLoader) loader;
    String skosFile = args.get("skosFile");
    String expansionTypeString = args.get("expansionType");
    String bufferSizeString = args.get("bufferSize");
    String languageString = args.get("language");
    String typeString = args.get("type");

    System.out.println("Passed argument: " + skosFile + " Type: " + expansionTypeString + " bufferSize: "
            + (bufferSizeString != null ? bufferSizeString : "Default") + " language: "
            + (languageString != null ? languageString : "All") + " type: "
            + (typeString != null ? typeString : "Default"));

    if (skosFile == null || expansionTypeString == null) {
        throw new IllegalArgumentException(
                "Mandatory parameters 'skosFile=FILENAME' or 'expansionType=[URI|LABEL]' missing");
    }

    try {
        if (skosFile.endsWith(".n3") || skosFile.endsWith(".rdf") || skosFile.endsWith(".ttl")
                || skosFile.endsWith(".zip")) {
            skosEngine = SKOSEngineFactory.getSKOSEngine(luceneMatchVersion,
                    solrLoader.getConfigDir() + skosFile,
                    languageString != null ? languageString.split(" ") : null);
        } else {
            throw new IOException(
                    "Allowed file suffixes are: .n3 (N3), .rdf (RDF/XML), .ttl (TURTLE) and .zip (ZIP)");
        }
    } catch (IOException e) {
        throw new RuntimeException("Could not instantiate SKOS engine", e);
    }

    if (expansionTypeString.equalsIgnoreCase(ExpansionType.URI.toString())) {
        expansionType = ExpansionType.URI;
    } else if (expansionTypeString.equalsIgnoreCase(ExpansionType.LABEL.toString())) {
        expansionType = ExpansionType.LABEL;
    } else {
        throw new IllegalArgumentException("The property 'expansionType' must be either URI or LABEL");
    }

    if (bufferSizeString != null) {
        bufferSize = Integer.parseInt(bufferSizeString);
        if (bufferSize < 1) {
            throw new IllegalArgumentException(
                    "The property 'bufferSize' must be a positive (smallish) integer");
        }
    }

    if (typeString != null) {
        List<SKOSType> types = new ArrayList<SKOSType>();
        for (String s : typeString.split(" ")) {
            // FIX: upper-case with a locale-independent rule; the default
            // locale (e.g. Turkish dotless-i) could corrupt enum names.
            SKOSType st = SKOSType.valueOf(s.toUpperCase(java.util.Locale.ROOT));
            // NOTE(review): Enum.valueOf never returns null — it throws
            // IllegalArgumentException for unknown names — so this guard is
            // defensive only and unknown tokens currently abort with IAE.
            if (st != null) {
                types.add(st);
            }
        }
        type = types.toArray(new SKOSType[types.size()]);
    }
}
From source file:com.datasalt.pangool.solr.SolrRecordWriter.java
License:Apache License
public SolrRecordWriter(int batchSize, boolean outputZipFile, int threadCount, int queueSize, String localSolrHome, String zipName, TupleDocumentConverter converter, TaskAttemptContext context) { this.localSolrHome = localSolrHome; this.zipName = zipName; conf = context.getConfiguration();/*from www. j av a 2 s .c o m*/ this.batchSize = batchSize; setLogLevel("org.apache.solr.core", "WARN"); setLogLevel("org.apache.solr.update", "WARN"); Logger.getLogger("org.apache.solr.core").setLevel(Level.WARN); Logger.getLogger("org.apache.solr.update").setLevel(Level.WARN); java.util.logging.Logger.getLogger("org.apache.solr.core").setLevel(java.util.logging.Level.WARNING); java.util.logging.Logger.getLogger("org.apache.solr.update").setLevel(java.util.logging.Level.WARNING); setLogLevel("org.apache.solr", "WARN"); Logger.getLogger("org.apache.solr").setLevel(Level.WARN); java.util.logging.Logger.getLogger("org.apache.solr").setLevel(java.util.logging.Level.WARNING); heartBeater = new HeartBeater(context); try { heartBeater.needHeartBeat(); /** The actual file in hdfs that holds the configuration. */ this.outputZipFile = outputZipFile; this.fs = FileSystem.get(conf); perm = new Path(FileOutputFormat.getOutputPath(context), getOutFileName(context, "part")); // Make a task unique name that contains the actual index output name to // make debugging simpler // Note: if using JVM reuse, the sequence number will not be reset for a // new task using the jvm Path temp = conf.getLocalPath("mapred.local.dir", "solr_" + conf.get("mapred.task.id") + '.' 
+ sequence.incrementAndGet()); if (outputZipFile && !perm.getName().endsWith(".zip")) { perm = perm.suffix(".zip"); } fs.delete(temp, true); // delete old, if any fs.delete(perm, true); // delete old, if any local = fs.startLocalOutput(perm, temp); solrHome = findSolrConfig(conf); // } // Verify that the solr home has a conf and lib directory if (solrHome == null) { throw new IOException("Unable to find solr home setting"); } // Setup a solr instance that we can batch writes to LOG.info("SolrHome: " + solrHome.toUri()); String dataDir = new File(local.toString(), "data").getAbsoluteFile().toString(); // copy the schema to the conf dir File confDir = new File(local.toString(), "conf"); confDir.mkdirs(); File unpackedSolrHome = new File(solrHome.toString()); FileUtils.copyDirectory(new File(unpackedSolrHome, "conf"), confDir); Properties props = new Properties(); props.setProperty("solr.data.dir", dataDir); props.setProperty("solr.home", solrHome.toString()); SolrResourceLoader loader = new SolrResourceLoader(solrHome.toString(), null, props); LOG.info(String.format( "Constructed instance information solr.home %s (%s), instance dir %s, conf dir %s, writing index to temporary directory %s, with permdir %s", solrHome, solrHome.toUri(), loader.getInstanceDir(), loader.getConfigDir(), dataDir, perm)); CoreContainer container = new CoreContainer(loader); CoreDescriptor descr = new CoreDescriptor(container, "core1", solrHome.toString()); descr.setDataDir(dataDir); descr.setCoreProperties(props); core = container.create(descr); container.register(core, false); solr = new EmbeddedSolrServer(container, "core1"); batchWriter = new BatchWriter(solr, batchSize, context.getTaskAttemptID().getTaskID(), threadCount, queueSize); this.converter = converter; } catch (Exception e) { e.printStackTrace(); LOG.error(e); throw new IllegalStateException(String.format("Failed to initialize record writer for %s, %s", context.getJobName(), conf.get("mapred.task.id")), e); } finally { 
heartBeater.cancelHeartBeat(); } }
From source file:com.ngdata.hbaseindexer.mr.TestUtils.java
License:Apache License
private static EmbeddedSolrServer createEmbeddedSolrServer(File solrHomeDir, FileSystem fs, Path outputShardDir) throws IOException { LOG.info("Creating embedded Solr server with solrHomeDir: " + solrHomeDir + ", fs: " + fs + ", outputShardDir: " + outputShardDir); // copy solrHomeDir to ensure it isn't modified across multiple unit tests or multiple EmbeddedSolrServer instances File tmpDir = Files.createTempDir(); tmpDir.deleteOnExit();/*from w w w . j a v a2s . c om*/ FileUtils.copyDirectory(solrHomeDir, tmpDir); solrHomeDir = tmpDir; Path solrDataDir = new Path(outputShardDir, "data"); String dataDirStr = solrDataDir.toUri().toString(); SolrResourceLoader loader = new SolrResourceLoader(Paths.get(solrHomeDir.toString()), null, null); LOG.info(String.format(Locale.ENGLISH, "Constructed instance information solr.home %s (%s), instance dir %s, conf dir %s, writing index to solr.data.dir %s, with permdir %s", solrHomeDir, solrHomeDir.toURI(), loader.getInstancePath(), loader.getConfigDir(), dataDirStr, outputShardDir)); // TODO: This is fragile and should be well documented System.setProperty("solr.directoryFactory", HdfsDirectoryFactory.class.getName()); System.setProperty("solr.lock.type", DirectoryFactory.LOCK_TYPE_HDFS); System.setProperty("solr.hdfs.nrtcachingdirectory", "false"); System.setProperty("solr.hdfs.blockcache.enabled", "false"); System.setProperty("solr.autoCommit.maxTime", "600000"); System.setProperty("solr.autoSoftCommit.maxTime", "-1"); CoreContainer container = new CoreContainer(loader); container.load(); SolrCore core = container.create("core1", Paths.get(solrHomeDir.toString()), ImmutableMap.of(CoreDescriptor.CORE_DATADIR, dataDirStr), false); if (!(core.getDirectoryFactory() instanceof HdfsDirectoryFactory)) { throw new UnsupportedOperationException( "Invalid configuration. 
Currently, the only DirectoryFactory supported is " + HdfsDirectoryFactory.class.getSimpleName()); } EmbeddedSolrServer solr = new EmbeddedSolrServer(container, "core1"); return solr; }
From source file:org.gbif.ocurrence.index.solr.SolrRecordWriter.java
License:Apache License
@SuppressWarnings("unchecked") public SolrRecordWriter(TaskAttemptContext context) { conf = context.getConfiguration();//from w w w . j a v a2 s. c o m batchSize = SolrOutputFormat.getBatchSize(conf); setLogLevel("org.apache.solr.core", "WARN"); setLogLevel("org.apache.solr.update", "WARN"); heartBeater = new HeartBeater(context); try { heartBeater.needHeartBeat(); /** The actual file in hdfs that holds the configuration. */ final String configuredSolrConfigPath = conf.get(SolrOutputFormat.SETUP_OK); if (configuredSolrConfigPath == null) { throw new IllegalStateException( String.format("The job did not pass %s", SolrOutputFormat.SETUP_OK)); } outputZipFile = SolrOutputFormat.isOutputZipFormat(conf); this.fs = FileSystem.get(conf); perm = new Path(FileOutputFormat.getOutputPath(context), getOutFileName(context, "part")); // Make a task unique name that contains the actual index output name to // make debugging simpler // Note: if using JVM reuse, the sequence number will not be reset for a // new task using the jvm temp = conf.getLocalPath("mapred.local.dir", "solr_" + conf.get("mapred.task.id") + '.' 
+ sequence.incrementAndGet()); if (outputZipFile && !perm.getName().endsWith(".zip")) { perm = perm.suffix(".zip"); } fs.delete(perm, true); // delete old, if any Path local = fs.startLocalOutput(perm, temp); solrHome = findSolrConfig(conf); // } // Verify that the solr home has a conf and lib directory if (solrHome == null) { throw new IOException("Unable to find solr home setting"); } // Setup a solr instance that we can batch writes to LOG.info("SolrHome: " + solrHome.toUri()); String dataDir = new File(local.toString(), "data").toString(); // copy the schema to the conf dir File confDir = new File(local.toString(), "conf"); confDir.mkdirs(); File srcSchemaFile = new File(solrHome.toString(), "conf/schema.xml"); assert srcSchemaFile.exists(); FileUtils.copyFile(srcSchemaFile, new File(confDir, "schema.xml")); Properties props = new Properties(); props.setProperty("solr.data.dir", dataDir); props.setProperty("solr.home", solrHome.toString()); SolrResourceLoader loader = new SolrResourceLoader(solrHome.toString(), null, props); LOG.info(String.format( "Constructed instance information solr.home %s (%s), instance dir %s, conf dir %s, writing index to temporary directory %s, with permdir %s", solrHome, solrHome.toUri(), loader.getInstanceDir(), loader.getConfigDir(), dataDir, perm)); CoreContainer container = new CoreContainer(loader); CoreDescriptor descr = new CoreDescriptor(container, "core1", solrHome.toString()); descr.setDataDir(dataDir); descr.setCoreProperties(props); core = container.create(descr); container.register(core, false); solr = new EmbeddedSolrServer(container, "core1"); batchWriter = new BatchWriter(solr, batchSize, context.getTaskAttemptID().getTaskID(), SolrOutputFormat.getSolrWriterThreadCount(conf), SolrOutputFormat.getSolrWriterQueueSize(conf)); // instantiate the converter String className = SolrDocumentConverter.getSolrDocumentConverter(conf); Class<? extends SolrDocumentConverter> cls = (Class<? 
extends SolrDocumentConverter>) Class .forName(className); converter = (SolrDocumentConverter<K, V>) ReflectionUtils.newInstance(cls, conf); } catch (Exception e) { throw new IllegalStateException(String.format("Failed to initialize record writer for %s, %s", context.getJobName(), conf.get("mapred.task.id")), e); } finally { heartBeater.cancelHeartBeat(); } }