Example usage for org.apache.lucene.util IOUtils deleteFilesIgnoringExceptions

List of usage examples for org.apache.lucene.util IOUtils deleteFilesIgnoringExceptions

Introduction

On this page you can find example usages of org.apache.lucene.util IOUtils deleteFilesIgnoringExceptions.

Prototype

public static void deleteFilesIgnoringExceptions(Collection<? extends Path> files) 

Source Link

Document

Deletes all given files, suppressing all thrown IOExceptions.

Usage

From source file:hunspell_stemmer.Dictionary.java

License:Apache License

/**
 * Creates a new Dictionary containing the information read from the provided InputStreams to hunspell affix
 * and dictionary files.
 * You have to close the provided InputStreams yourself.
 *
 * @param affix InputStream for reading the hunspell affix file (won't be closed).
 * @param dictionaries InputStream for reading the hunspell dictionary files (won't be closed).
 * @param ignoreCase whether case should be ignored; also enables input cleaning.
 * @throws IOException Can be thrown while reading from the InputStreams
 * @throws ParseException Can be thrown if the content of the files does not meet expected formats
 */
  public Dictionary(InputStream affix, List<InputStream> dictionaries, boolean ignoreCase)
          throws IOException, ParseException {
      this.ignoreCase = ignoreCase;
      this.needsInputCleaning = ignoreCase;
      this.needsOutputCleaning = false; // set if we have an OCONV
      flagLookup.add(new BytesRef()); // no flags -> ord 0

      // the affix stream is copied to a temp file so it can be read twice
      // (pass 1: detect encoding, pass 2: parse with that encoding)
      Path aff = Files.createTempFile(tempDir, "affix", "aff");
      OutputStream out = new BufferedOutputStream(Files.newOutputStream(aff));
      InputStream aff1 = null;
      InputStream aff2 = null;
      boolean success = false;
      try {
          // copy contents of affix stream to temp file
          final byte[] buffer = new byte[1024 * 8];
          int len;
          while ((len = affix.read(buffer)) > 0) {
              out.write(buffer, 0, len);
          }
          out.close();

          // pass 1: get encoding
          aff1 = new BufferedInputStream(Files.newInputStream(aff));
          String encoding = getDictionaryEncoding(aff1);

          // pass 2: parse affixes
          CharsetDecoder decoder = getJavaEncoding(encoding);
          aff2 = new BufferedInputStream(Files.newInputStream(aff));
          readAffixFile(aff2, decoder);

          // read dictionary entries
          IntSequenceOutputs o = IntSequenceOutputs.getSingleton();
          Builder<IntsRef> b = new Builder<>(FST.INPUT_TYPE.BYTE4, o);
          readDictionaryFiles(dictionaries, decoder, b);
          words = b.finish();
          aliases = null; // no longer needed
          morphAliases = null; // no longer needed
          success = true;
      } finally {
          // always close our own streams; the temp affix file is deleted either way,
          // but on failure the delete is best-effort so the original exception wins
          IOUtils.closeWhileHandlingException(out, aff1, aff2);
          if (success) {
              Files.delete(aff);
          } else {
              IOUtils.deleteFilesIgnoringExceptions(aff);
          }
      }
  }

From source file:hunspell_stemmer.Dictionary.java

License:Apache License

/**
 * Reads the dictionary files through the provided InputStreams, building up the words map.
 * <p>
 * This happens in three phases: (1) normalize each entry into an unsorted temp file,
 * (2) sort that file offline, (3) stream the sorted entries into the FST builder.
 *
 * @param dictionaries InputStreams to read the dictionary file through
 * @param decoder CharsetDecoder used to decode the contents of the file
 * @param words FST builder that accumulates each entry mapped to its flag ordinals
 * @throws IOException Can be thrown while reading from the file
 */
  private void readDictionaryFiles(List<InputStream> dictionaries, CharsetDecoder decoder, Builder<IntsRef> words)
          throws IOException {
      BytesRefBuilder flagsScratch = new BytesRefBuilder();
      IntsRefBuilder scratchInts = new IntsRefBuilder();

      StringBuilder sb = new StringBuilder();

      // phase 1: write normalized entries to an unsorted temp file
      Path unsorted = Files.createTempFile(tempDir, "unsorted", "dat");
      try (ByteSequencesWriter writer = new ByteSequencesWriter(unsorted)) {
          for (InputStream dictionary : dictionaries) {
              BufferedReader lines = new BufferedReader(new InputStreamReader(dictionary, decoder));
              String line = lines.readLine(); // first line is number of entries (approximately, sometimes)

              while ((line = lines.readLine()) != null) {
                  // wild and unpredictable code comment rules
                  if (line.isEmpty() || line.charAt(0) == '/' || line.charAt(0) == '#'
                          || line.charAt(0) == '\t') {
                      continue;
                  }
                  line = unescapeEntry(line);
                  // if we havent seen any stem exceptions, try to parse one
                  if (hasStemExceptions == false) {
                      int morphStart = line.indexOf(MORPH_SEPARATOR);
                      if (morphStart >= 0 && morphStart < line.length()) {
                          hasStemExceptions = parseStemException(line.substring(morphStart + 1)) != null;
                      }
                  }
                  if (needsInputCleaning) {
                      // clean only the word part of the entry, leaving flags/morph data intact
                      int flagSep = line.indexOf(FLAG_SEPARATOR);
                      if (flagSep == -1) {
                          flagSep = line.indexOf(MORPH_SEPARATOR);
                      }
                      if (flagSep == -1) {
                          CharSequence cleansed = cleanInput(line, sb);
                          writer.write(cleansed.toString().getBytes(StandardCharsets.UTF_8));
                      } else {
                          String text = line.substring(0, flagSep);
                          CharSequence cleansed = cleanInput(text, sb);
                          if (cleansed != sb) {
                              sb.setLength(0);
                              sb.append(cleansed);
                          }
                          sb.append(line.substring(flagSep));
                          writer.write(sb.toString().getBytes(StandardCharsets.UTF_8));
                      }
                  } else {
                      writer.write(line.getBytes(StandardCharsets.UTF_8));
                  }
              }
          }
      }
      Path sorted = Files.createTempFile(tempDir, "sorted", "dat");

      // phase 2: sort entries by the word part only (bytes before FLAG/MORPH separator),
      // tie-breaking on the whole row so the sort is total and deterministic
      OfflineSorter sorter = new OfflineSorter(new Comparator<BytesRef>() {
          BytesRef scratch1 = new BytesRef();
          BytesRef scratch2 = new BytesRef();

          @Override
          public int compare(BytesRef o1, BytesRef o2) {
              scratch1.bytes = o1.bytes;
              scratch1.offset = o1.offset;
              scratch1.length = o1.length;

              // truncate to just before the last separator, i.e. the word itself
              for (int i = scratch1.length - 1; i >= 0; i--) {
                  if (scratch1.bytes[scratch1.offset + i] == FLAG_SEPARATOR
                          || scratch1.bytes[scratch1.offset + i] == MORPH_SEPARATOR) {
                      scratch1.length = i;
                      break;
                  }
              }

              scratch2.bytes = o2.bytes;
              scratch2.offset = o2.offset;
              scratch2.length = o2.length;

              for (int i = scratch2.length - 1; i >= 0; i--) {
                  if (scratch2.bytes[scratch2.offset + i] == FLAG_SEPARATOR
                          || scratch2.bytes[scratch2.offset + i] == MORPH_SEPARATOR) {
                      scratch2.length = i;
                      break;
                  }
              }

              int cmp = scratch1.compareTo(scratch2);
              if (cmp == 0) {
                  // tie break on whole row
                  return o1.compareTo(o2);
              } else {
                  return cmp;
              }
          }
      });
      boolean success = false;
      try {
          sorter.sort(unsorted, sorted);
          success = true;
      } finally {
          // unsorted temp file is no longer needed; on failure delete best-effort
          // so the original exception propagates
          if (success) {
              Files.delete(unsorted);
          } else {
              IOUtils.deleteFilesIgnoringExceptions(unsorted);
          }
      }

      // phase 3: stream sorted entries, grouping consecutive identical words into one FST entry
      boolean success2 = false;
      ByteSequencesReader reader = new ByteSequencesReader(sorted);
      try {
          BytesRefBuilder scratchLine = new BytesRefBuilder();

          // TODO: the flags themselves can be double-chars (long) or also numeric
          // either way the trick is to encode them as char... but they must be parsed differently

          String currentEntry = null;
          IntsRefBuilder currentOrds = new IntsRefBuilder();

          String line;
          while (reader.read(scratchLine)) {
              line = scratchLine.get().utf8ToString();
              String entry;
              char wordForm[];
              int end;

              int flagSep = line.indexOf(FLAG_SEPARATOR);
              if (flagSep == -1) {
                  wordForm = NOFLAGS;
                  end = line.indexOf(MORPH_SEPARATOR);
                  entry = line.substring(0, end);
              } else {
                  end = line.indexOf(MORPH_SEPARATOR);
                  String flagPart = line.substring(flagSep + 1, end);
                  if (aliasCount > 0) {
                      // numeric flag part is an alias index, resolve it
                      flagPart = getAliasValue(Integer.parseInt(flagPart));
                  }

                  wordForm = flagParsingStrategy.parseFlags(flagPart);
                  Arrays.sort(wordForm);
                  entry = line.substring(0, flagSep);
              }
              // we possibly have morphological data
              int stemExceptionID = 0;
              if (hasStemExceptions && end + 1 < line.length()) {
                  String stemException = parseStemException(line.substring(end + 1));
                  if (stemException != null) {
                      if (stemExceptionCount == stemExceptions.length) {
                          int newSize = ArrayUtil.oversize(stemExceptionCount + 1,
                                  RamUsageEstimator.NUM_BYTES_OBJECT_REF);
                          stemExceptions = Arrays.copyOf(stemExceptions, newSize);
                      }
                      stemExceptionID = stemExceptionCount + 1; // we use '0' to indicate no exception for the form
                      stemExceptions[stemExceptionCount++] = stemException;
                  }
              }

              // input is sorted, so cmp < 0 means the comparator and this loop disagree: fail fast
              int cmp = currentEntry == null ? 1 : entry.compareTo(currentEntry);
              if (cmp < 0) {
                  throw new IllegalArgumentException("out of order: " + entry + " < " + currentEntry);
              } else {
                  encodeFlags(flagsScratch, wordForm);
                  int ord = flagLookup.add(flagsScratch.get());
                  if (ord < 0) {
                      // already exists in our hash
                      ord = (-ord) - 1;
                  }
                  // finalize current entry, and switch "current" if necessary
                  if (cmp > 0 && currentEntry != null) {
                      Util.toUTF32(currentEntry, scratchInts);
                      words.add(scratchInts.get(), currentOrds.get());
                  }
                  // swap current
                  if (cmp > 0 || currentEntry == null) {
                      currentEntry = entry;
                      currentOrds = new IntsRefBuilder(); // must be this way
                  }
                  if (hasStemExceptions) {
                      currentOrds.append(ord);
                      currentOrds.append(stemExceptionID);
                  } else {
                      currentOrds.append(ord);
                  }
              }
          }

          // finalize last entry
          Util.toUTF32(currentEntry, scratchInts);
          words.add(scratchInts.get(), currentOrds.get());
          success2 = true;
      } finally {
          IOUtils.closeWhileHandlingException(reader);
          if (success2) {
              Files.delete(sorted);
          } else {
              IOUtils.deleteFilesIgnoringExceptions(sorted);
          }
      }
  }

From source file:org.elasticsearch.benchmark.fs.FsAppendBenchmark.java

License:Apache License

/** Benchmark: append 10gb of random data to work/test.log in 1k chunks, then force it to disk. */
public static void main(String[] args) throws Exception {
    Path logFile = PathUtils.get("work/test.log");
    // start from a clean slate; a missing file is not an error
    IOUtils.deleteFilesIgnoringExceptions(logFile);

    int chunkSize = (int) ByteSizeValue.parseBytesSizeValue("1k").bytes();
    long totalBytes = ByteSizeValue.parseBytesSizeValue("10gb").bytes();

    // one reusable chunk of random bytes, written repeatedly at increasing offsets
    byte[] chunk = new byte[chunkSize];
    new Random().nextBytes(chunk);

    StopWatch watch = new StopWatch().start("write");
    try (FileChannel channel = FileChannel.open(logFile, StandardOpenOption.WRITE,
            StandardOpenOption.CREATE_NEW)) {
        for (long position = 0; position < totalBytes; position += chunk.length) {
            channel.write(ByteBuffer.wrap(chunk), position);
        }
        // time the final fsync separately from the buffered writes
        watch.stop().start("flush");
        channel.force(true);
    }
    watch.stop();
    System.out.println("Wrote [" + (new ByteSizeValue(totalBytes)) + "], chunk [" + (new ByteSizeValue(chunkSize))
            + "], in " + watch);
}

From source file:org.elasticsearch.bootstrap.Seccomp.java

License:Apache License

/** try to install our custom rule profile into sandbox_init() to block execution */
private static void macImpl(Path tmpFile) throws IOException {
    // first be defensive: we can give nice errors this way, at the very least.
    boolean supported = Constants.MAC_OS_X;
    if (supported == false) {
        throw new IllegalStateException(
                "bug: should not be trying to initialize seatbelt for an unsupported OS");
    }

    // we couldn't link methods, could be some really ancient OS X (< Leopard) or some bug
    if (libc_mac == null) {
        throw new UnsupportedOperationException(
                "seatbelt unavailable: could not link methods. requires Leopard or above.");
    }

    // write rules to a temporary file, which will be passed to sandbox_init()
    Path rules = Files.createTempFile(tmpFile, "es", "sb");
    Files.write(rules, Collections.singleton(SANDBOX_RULES), StandardCharsets.UTF_8);

    boolean success = false;
    try {
        PointerByReference errorRef = new PointerByReference();
        int ret = libc_mac.sandbox_init(rules.toAbsolutePath().toString(), SANDBOX_NAMED, errorRef);
        // if sandbox_init() fails, add the message from the OS (e.g. syntax error) and free the buffer
        if (ret != 0) {
            Pointer errorBuf = errorRef.getValue();
            RuntimeException e = new UnsupportedOperationException("sandbox_init(): " + errorBuf.getString(0));
            libc_mac.sandbox_free_error(errorBuf);
            throw e;
        }
        logger.debug("OS X seatbelt initialization successful");
        success = true;
    } finally {
        // rules file is only needed during sandbox_init(); on failure delete best-effort
        // so the original exception is the one that propagates
        if (success) {
            Files.delete(rules);
        } else {
            IOUtils.deleteFilesIgnoringExceptions(rules);
        }
    }
}

From source file:org.elasticsearch.bootstrap.Security.java

License:Apache License

/** 
 * Initializes securitymanager for the environment
 * Can only happen once!/*  w ww . j  a va 2  s  .  c  o  m*/
 */
static void configure(Environment environment) throws IOException {
    // init lucene random seed. it will use /dev/urandom where available.
    StringHelper.randomId();
    InputStream config = Security.class.getResourceAsStream(POLICY_RESOURCE);
    if (config == null) {
        throw new NoSuchFileException(POLICY_RESOURCE);
    }
    Path newConfig = processTemplate(config, environment);
    System.setProperty("java.security.policy", newConfig.toString());
    System.setSecurityManager(new SecurityManager());
    IOUtils.deleteFilesIgnoringExceptions(newConfig); // TODO: maybe log something if it fails?
}

From source file:org.elasticsearch.bootstrap.SystemCallFilter.java

License:Apache License

/** try to install our custom rule profile into sandbox_init() to block execution */
private static void macImpl(Path tmpFile) throws IOException {
    // be defensive about the OS first: this produces the clearest possible error.
    if (Constants.MAC_OS_X == false) {
        throw new IllegalStateException(
                "bug: should not be trying to initialize seatbelt for an unsupported OS");
    }

    // methods failed to link: could be some really ancient OS X (< Leopard) or some bug
    if (libc_mac == null) {
        throw new UnsupportedOperationException(
                "seatbelt unavailable: could not link methods. requires Leopard or above.");
    }

    // sandbox_init() reads its profile from a file, so stage the rules in a temp file
    Path rules = Files.createTempFile(tmpFile, "es", "sb");
    Files.write(rules, Collections.singleton(SANDBOX_RULES));

    boolean initialized = false;
    try {
        PointerByReference errorHolder = new PointerByReference();
        int rc = libc_mac.sandbox_init(rules.toAbsolutePath().toString(), SANDBOX_NAMED, errorHolder);
        if (rc != 0) {
            // surface the OS message (e.g. syntax error), then release the OS-owned buffer
            Pointer buf = errorHolder.getValue();
            RuntimeException failure = new UnsupportedOperationException(
                    "sandbox_init(): " + buf.getString(0));
            libc_mac.sandbox_free_error(buf);
            throw failure;
        }
        logger.debug("OS X seatbelt initialization successful");
        initialized = true;
    } finally {
        // rules file is only needed during sandbox_init(); on failure clean up best-effort
        if (initialized) {
            Files.delete(rules);
        } else {
            IOUtils.deleteFilesIgnoringExceptions(rules);
        }
    }
}

From source file:org.elasticsearch.index.translog.fs.RafReference.java

License:Apache License

public void decreaseRefCount(boolean delete) {
    // only the final reference holder performs the close (and optional delete)
    if (refCount.decrementAndGet() > 0) {
        return;
    }
    try {
        raf.close();
    } catch (IOException ignored) {
        // best effort: a failed close must not prevent the deletion below
    } finally {
        if (delete) {
            IOUtils.deleteFilesIgnoringExceptions(file.toPath());
        }
    }
}

From source file:org.elasticsearch.indices.IndicesCustomDataPathIT.java

License:Apache License

@After
public void teardown() throws Exception {
    // clean up the custom data path used by this test; files already gone are ignored
    IOUtils.deleteFilesIgnoringExceptions(PathUtils.get(path));
}

From source file:org.elasticsearch.plugins.PluginManager.java

License:Apache License

/**
 * Downloads the plugin distribution for the given handle, trying the explicitly
 * configured URL first (if any) and then each of the handle's known locations.
 *
 * @param pluginHandle describes the plugin to download
 * @param terminal terminal used for progress and status output
 * @return path to the downloaded plugin file
 * @throws IOException if basic auth is attempted over plain HTTP, or if no location yields a download
 */
private Path download(PluginHandle pluginHandle, Terminal terminal) throws IOException {
    Path pluginFile = pluginHandle.newDistroFile(environment);

    HttpDownloadHelper downloadHelper = new HttpDownloadHelper();
    boolean downloaded = false;
    boolean verified = false;
    HttpDownloadHelper.DownloadProgress progress;
    if (outputMode == OutputMode.SILENT) {
        progress = new HttpDownloadHelper.NullProgress();
    } else {
        progress = new HttpDownloadHelper.VerboseProgress(terminal.writer());
    }

    // first, try directly from the URL provided
    if (url != null) {
        URL pluginUrl = url;
        // basic auth credentials embedded in the URL would travel in cleartext over plain HTTP
        boolean isSecureProtocol = "https".equalsIgnoreCase(pluginUrl.getProtocol());
        boolean isAuthInfoSet = !Strings.isNullOrEmpty(pluginUrl.getUserInfo());
        if (isAuthInfoSet && !isSecureProtocol) {
            throw new IOException("Basic auth is only supported for HTTPS!");
        }

        terminal.println("Trying %s ...", pluginUrl.toExternalForm());
        try {
            downloadHelper.download(pluginUrl, pluginFile, progress, this.timeout);
            downloaded = true;
            terminal.println("Verifying %s checksums if available ...", pluginUrl.toExternalForm());
            // either a sha1 or an md5 checksum match counts as verified
            Tuple<URL, Path> sha1Info = pluginHandle.newChecksumUrlAndFile(environment, pluginUrl, "sha1");
            verified = downloadHelper.downloadAndVerifyChecksum(sha1Info.v1(), pluginFile, sha1Info.v2(),
                    progress, this.timeout, HttpDownloadHelper.SHA1_CHECKSUM);
            Tuple<URL, Path> md5Info = pluginHandle.newChecksumUrlAndFile(environment, pluginUrl, "md5");
            verified = verified || downloadHelper.downloadAndVerifyChecksum(md5Info.v1(), pluginFile,
                    md5Info.v2(), progress, this.timeout, HttpDownloadHelper.MD5_CHECKSUM);
        } catch (ElasticsearchTimeoutException | ElasticsearchCorruptionException e) {
            throw e;
        } catch (Exception e) {
            // ignore: fall through to the "not downloaded" handling below
            terminal.println("Failed: %s", ExceptionsHelper.detailedMessage(e));
        }
    } else {
        if (PluginHandle.isOfficialPlugin(pluginHandle.name, pluginHandle.user, pluginHandle.version)) {
            checkForOfficialPlugins(pluginHandle.name);
        }
    }

    if (!downloaded && url == null) {
        // We try all possible locations
        for (URL url : pluginHandle.urls()) {
            terminal.println("Trying %s ...", url.toExternalForm());
            try {
                downloadHelper.download(url, pluginFile, progress, this.timeout);
                downloaded = true;
                terminal.println("Verifying %s checksums if available ...", url.toExternalForm());
                Tuple<URL, Path> sha1Info = pluginHandle.newChecksumUrlAndFile(environment, url, "sha1");
                verified = downloadHelper.downloadAndVerifyChecksum(sha1Info.v1(), pluginFile, sha1Info.v2(),
                        progress, this.timeout, HttpDownloadHelper.SHA1_CHECKSUM);
                Tuple<URL, Path> md5Info = pluginHandle.newChecksumUrlAndFile(environment, url, "md5");
                verified = verified || downloadHelper.downloadAndVerifyChecksum(md5Info.v1(), pluginFile,
                        md5Info.v2(), progress, this.timeout, HttpDownloadHelper.MD5_CHECKSUM);
                break;
            } catch (ElasticsearchTimeoutException | ElasticsearchCorruptionException e) {
                throw e;
            } catch (Exception e) {
                terminal.println(VERBOSE, "Failed: %s", ExceptionsHelper.detailedMessage(e));
            }
        }
    }

    if (!downloaded) {
        // try to cleanup what we downloaded
        IOUtils.deleteFilesIgnoringExceptions(pluginFile);
        throw new IOException(
                "failed to download out of all possible locations..., use --verbose to get detailed information");
    }

    if (verified == false) {
        terminal.println(
                "NOTE: Unable to verify checksum for downloaded plugin (unable to find .sha1 or .md5 file to verify)");
    }
    return pluginFile;
}

From source file:org.elasticsearch.snapshots.SharedClusterSnapshotRestoreIT.java

License:Apache License

/**
 * Verifies that a snapshot can still be deleted (and subsequently reports missing)
 * even after its index-level and shard-level metadata files have been removed
 * from the repository.
 */
public void testDeleteSnapshotWithMissingIndexAndShardMetadata() throws Exception {
    Client client = client();

    Path repo = randomRepoPath();
    logger.info("-->  creating repository at {}", repo.toAbsolutePath());
    assertAcked(client.admin().cluster().preparePutRepository("test-repo").setType("fs")
            .setSettings(Settings.builder().put("location", repo).put("compress", false).put("chunk_size",
                    randomIntBetween(100, 1000), ByteSizeUnit.BYTES)));

    createIndex("test-idx-1", "test-idx-2");
    logger.info("--> indexing some data");
    indexRandom(true, client().prepareIndex("test-idx-1", "doc").setSource("foo", "bar"),
            client().prepareIndex("test-idx-2", "doc").setSource("foo", "bar"));

    logger.info("--> creating snapshot");
    CreateSnapshotResponse createSnapshotResponse = client.admin().cluster()
            .prepareCreateSnapshot("test-repo", "test-snap-1").setWaitForCompletion(true)
            .setIndices("test-idx-*").get();
    assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
    assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(),
            equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));

    logger.info("--> delete index metadata and shard metadata");
    Path indices = repo.resolve("indices");
    Path testIndex1 = indices.resolve("test-idx-1");
    Path testIndex2 = indices.resolve("test-idx-2");
    Path testIndex2Shard0 = testIndex2.resolve("0");
    // deliberately corrupt the repository by removing metadata; best-effort delete is fine here
    IOUtils.deleteFilesIgnoringExceptions(testIndex1.resolve("snapshot-test-snap-1"));
    IOUtils.deleteFilesIgnoringExceptions(testIndex2Shard0.resolve("snapshot-test-snap-1"));

    logger.info("--> delete snapshot");
    client.admin().cluster().prepareDeleteSnapshot("test-repo", "test-snap-1").get();

    logger.info("--> make sure snapshot doesn't exist");
    assertThrows(client.admin().cluster().prepareGetSnapshots("test-repo").addSnapshots("test-snap-1"),
            SnapshotMissingException.class);
}