List of usage examples for org.apache.commons.codec.digest DigestUtils sha1Hex
public static String sha1Hex(String data)
From source file:ddf.camel.component.catalog.content.ContentProducerDataAccessObject.java
/**
 * Creates, updates, or deletes a catalog content item in response to a filesystem watch event.
 *
 * @param fileIdMap    persistence provider mapping hashed reference keys to metacard ids
 * @param endpoint     endpoint providing access to the catalog framework
 * @param ingestedFile the file that triggered the event; may be null only for delete events
 * @param eventType    one of ENTRY_CREATE, ENTRY_MODIFY, ENTRY_DELETE
 * @param mimeType     mime type of the ingested file
 * @param headers      camel message headers; may carry a store-reference key and properties
 * @throws SourceUnavailableException if the catalog source does not become available
 * @throws IngestException            if the catalog operation fails
 */
public void createContentItem(FileSystemPersistenceProvider fileIdMap, ContentEndpoint endpoint,
        File ingestedFile, WatchEvent.Kind<Path> eventType, String mimeType, Map<String, Object> headers)
        throws SourceUnavailableException, IngestException {
    LOGGER.debug("Creating content item.");
    // A null file is only meaningful for deletes (the file is already gone); otherwise bail out.
    if (!eventType.equals(ENTRY_DELETE) && ingestedFile == null) {
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Ingested File was null with eventType [{}]. Doing nothing.", eventType.name());
        }
        return;
    }
    String refKey = (String) headers.get(Constants.STORE_REFERENCE_KEY);
    String safeKey = null;
    String id = null;
    // null if the file is being stored in the content store
    // not null if the file lives outside the content store (external reference)
    if (refKey != null) {
        // guards against impermissible filesystem characters
        safeKey = DigestUtils.sha1Hex(refKey);
        if (fileIdMap.loadAllKeys().contains(safeKey)) {
            id = String.valueOf(fileIdMap.loadFromPersistence(safeKey));
        } else if (!ENTRY_CREATE.equals(eventType)) {
            // Cannot modify/delete a reference we never recorded an id for.
            LOGGER.warn("Unable to look up id for {}, not performing {}", refKey, eventType.name());
            return;
        }
    }
    if (ENTRY_CREATE.equals(eventType)) {
        CreateStorageRequest createRequest = new CreateStorageRequestImpl(
                Collections.singletonList(
                        new ContentItemImpl(uuidGenerator.generateUuid(), Files.asByteSource(ingestedFile),
                                mimeType, ingestedFile.getName(), ingestedFile.length(), null)),
                getProperties(headers));
        CatalogFramework catalogFramework = endpoint.getComponent().getCatalogFramework();
        // Block until the catalog source reports available before issuing the create.
        waitForAvailableSource(catalogFramework);
        CreateResponse createResponse = catalogFramework.create(createRequest);
        if (createResponse != null) {
            List<Metacard> createdMetacards = createResponse.getCreatedMetacards();
            if (safeKey != null) {
                // Remember the external-reference -> metacard id mapping for later modify/delete.
                fileIdMap.store(safeKey, createdMetacards.get(0).getId());
            }
            logIds(createdMetacards, "created");
        }
    } else if (ENTRY_MODIFY.equals(eventType)) {
        // NOTE(review): id is null here when no refKey was supplied — confirm the framework
        // resolves the item by URI in that case. Size is passed as 0 for updates.
        UpdateStorageRequest updateRequest = new UpdateStorageRequestImpl(
                Collections.singletonList(new ContentItemImpl(id, Files.asByteSource(ingestedFile), mimeType,
                        ingestedFile.getName(), 0, null)),
                getProperties(headers));
        UpdateResponse updateResponse = endpoint.getComponent().getCatalogFramework().update(updateRequest);
        if (updateResponse != null) {
            List<Update> updatedMetacards = updateResponse.getUpdatedMetacards();
            logIds(updatedMetacards.stream().map(Update::getNewMetacard).collect(Collectors.toList()),
                    "updated");
        }
    } else if (ENTRY_DELETE.equals(eventType)) {
        DeleteRequest deleteRequest = new DeleteRequestImpl(id);
        DeleteResponse deleteResponse = endpoint.getComponent().getCatalogFramework().delete(deleteRequest);
        if (deleteResponse != null) {
            List<Metacard> deletedMetacards = deleteResponse.getDeletedMetacards();
            if (safeKey != null) {
                // Drop the stale reference mapping now that the item is gone.
                fileIdMap.delete(safeKey);
            }
            logIds(deletedMetacards, "deleted");
        }
    }
}
From source file:lyonlancer5.karasu.util.ModFileUtils.java
/**
 * Verifies the integrity of the mod JAR against the MD5 and SHA-1 checksums
 * published in the remote hash manifest ({@code remoteHashes}).
 *
 * <p>Does nothing (beyond a warning banner) when {@code doHashCheck} is false.
 * Throws a {@link RuntimeException} when validation utilities were not
 * initialized, on I/O failure, or when either checksum does not match.
 *
 * @param jarFile the JAR file this mod was loaded from
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public synchronized void doHashCheck(File jarFile) {
    if (doHashCheck) {
        if (hasInitialized) {
            LOGGER.info("Mod source file " + jarFile.getName() + " located at " + jarFile.getParent());
            // A non-file or *.bin location indicates a development environment (unpacked classes).
            if (jarFile.isFile() && !jarFile.getName().endsWith("bin")) {
                try {
                    HashMap params = (HashMap) ((HashMap) Yaml.loadType(remoteHashes, HashMap.class)
                            .get("version")).get(Constants.VERSION);
                    if (!params.get("jar").equals(jarFile.getName())) {
                        LOGGER.warn("JAR filename has been changed");
                    }
                    HashMap<String, String> theHashes = (HashMap<String, String>) params.get("hash");
                    // BUGFIX: DigestUtils.*Hex(InputStream) consumes the stream, so reusing one
                    // FileInputStream for both digests made the second digest hash an empty
                    // stream. Each digest now gets its own stream, and try-with-resources
                    // guarantees the streams are closed even when an exception is thrown.
                    final String md5;
                    try (FileInputStream fis = new FileInputStream(jarFile)) {
                        md5 = DigestUtils.md5Hex(fis);
                    }
                    final String sha1;
                    try (FileInputStream fis = new FileInputStream(jarFile)) {
                        sha1 = DigestUtils.sha1Hex(fis);
                    }
                    if (md5.equals(theHashes.get("md5"))) {
                        LOGGER.info("Validated MD5 hash - " + md5);
                    } else {
                        // BUGFIX: labels were swapped — "Expected" is the manifest value,
                        // "Received" is the locally computed digest.
                        throw new RuntimeException("MD5 check FAILED: Expected " + theHashes.get("md5")
                                + " - Received " + md5);
                    }
                    if (sha1.equals(theHashes.get("sha1"))) {
                        LOGGER.info("Validated SHA1 hash - " + sha1);
                    } else {
                        throw new RuntimeException("SHA1 check FAILED: Expected " + theHashes.get("sha1")
                                + " - Received " + sha1);
                    }
                } catch (IOException e) {
                    throw new RuntimeException("Validation FAILED - I/O error", e);
                }
            } else {
                LOGGER.warn(
                        "The mod is currently running on a development environment - Integrity checking will not proceed");
            }
        } else {
            throw new RuntimeException("Validation FAILED - Validation utilites have not been initialized!");
        }
    } else {
        LOGGER.warn("#########################################################################");
        LOGGER.warn("WARNING: Integrity checks have been DISABLED!");
        LOGGER.warn("Hash checks will not be performed - this mod may not run correctly");
        LOGGER.warn("Any changes made to this mod will not be validated, whether it came from");
        LOGGER.warn("a legitimate edit or an attempt to insert code into this modification");
        LOGGER.warn("#########################################################################");
    }
}
From source file:Controller.ThreadExcelImport.java
/**
 * Imports and cross-validates two Excel spreadsheets using streaming readers.
 *
 * Phases:
 *  1. Open both spreadsheets and map each template column name to its column index
 *     from the header row (row 0); aborts the JVM if a template column is missing.
 *  2. Load spreadsheet 2 fully into memory ({@code listaP2}), keyed by a SHA-1 hash
 *     chained over its key columns, so it is not reopened per row of spreadsheet 1.
 *  3. Re-stream spreadsheet 1 and, for each row, look up the matching spreadsheet 2
 *     row by the same key hash and publish a {@code Notify} to observers.
 *
 * NOTE(review): the streaming reader is forward-only, which is why streams are
 * recreated after the header pass. The header row of spreadsheet 1 (rowNum 0) also
 * flows through phase 3 and is published to observers — confirm this is intended.
 */
@Override
public void run() {
    // ----- Phase 1a: open streams for both spreadsheets -----
    // stream for spreadsheet 1
    InputStream stream1 = null;
    try {
        stream1 = new FileInputStream(new File(srcFileP1));
    } catch (FileNotFoundException ex) {
        Logger.getLogger(ThreadExcelImport.class.getName()).log(Level.SEVERE, null, ex);
    }
    Workbook workbook1 = StreamingReader.builder().rowCacheSize(100) // number of rows to keep in memory (defaults to 10)
            .bufferSize(4096) // buffer size to use when reading InputStream to file (defaults to 1024)
            .open(stream1);
    // stream for spreadsheet 2
    InputStream stream2 = null;
    try {
        stream2 = new FileInputStream(new File(srcFileP2));
    } catch (FileNotFoundException ex) {
        Logger.getLogger(ThreadExcelImport.class.getName()).log(Level.SEVERE, null, ex);
    }
    Workbook workbook2 = StreamingReader.builder().rowCacheSize(100) // number of rows to keep in memory (defaults to 10)
            .bufferSize(4096) // buffer size to use when reading InputStream to file (defaults to 1024)
            .open(stream2);
    // ----- Phase 1b: validate the headers -----
    // headers of spreadsheet 1
    Sheet sheet1 = null;
    sheet1 = workbook1.getSheetAt(0);
    // Map each template column to its column index based on the header row (row 0 only).
    for (Row r : sheet1) {
        if (r.getRowNum() > 0)
            break;
        for (Integer i = 0; i < headerP1.size(); i++) {
            for (Cell c : r) {
                if (c.getStringCellValue().toLowerCase()
                        .equals(headerP1.get(i).getColumnName().toLowerCase())) {
                    // record the column number on the header entry
                    headerP1.get(i).setColumnNumber(c.getColumnIndex());
                    break;
                }
            }
            if (headerP1.get(i).getColumnNumber() == null) {
                // a template column is missing from spreadsheet 1 — abort
                JOptionPane.showMessageDialog(null, "A coluna " + headerP1.get(i).getColumnName().toLowerCase()
                        + " do template no existe como cabealho na planilha 1");
                System.exit(0);
            }
        }
    }
    // headers of spreadsheet 2
    Sheet sheet2 = null;
    sheet2 = workbook2.getSheetAt(0);
    // Map each template column to its column index based on the header row (row 0 only).
    for (Row r : sheet2) {
        if (r.getRowNum() > 0)
            break;
        for (Integer i = 0; i < headerP2.size(); i++) {
            for (Cell c : r) {
                if (c.getStringCellValue().toLowerCase()
                        .equals(headerP2.get(i).getColumnName().toLowerCase())) {
                    // record the column number on the header entry
                    headerP2.get(i).setColumnNumber(c.getColumnIndex());
                    break;
                }
            }
            if (headerP2.get(i).getColumnNumber() == null) {
                // a template column is missing from spreadsheet 2 — abort
                JOptionPane.showMessageDialog(null, "A coluna " + headerP2.get(i).getColumnName().toLowerCase()
                        + " do template no existe como cabealho na planilha 2");
                System.exit(0);
            }
        }
    }
    // ----- Phase 2: load spreadsheet 2 into memory so it is not reopened on
    // every iteration of spreadsheet 1 (the streaming reader is forward-only) -----
    stream2 = null;
    try {
        stream2 = new FileInputStream(new File(srcFileP2));
    } catch (FileNotFoundException ex) {
        Logger.getLogger(ThreadExcelImport.class.getName()).log(Level.SEVERE, null, ex);
    }
    workbook2 = StreamingReader.builder().rowCacheSize(100) // number of rows to keep in memory (defaults to 10)
            .bufferSize(4096) // buffer size to use when reading InputStream to file (defaults to 1024)
            .open(stream2);
    sheet2 = null;
    sheet2 = workbook2.getSheetAt(0);
    for (Row rowP2 : sheet2) {
        if (rowP2.getRowNum() > 0) { // skip the header row
            InterfaceMigracao objInterfaceP2 = Factory.getInstance(templateName);
            // compute the key hash: SHA-1 chained over the key columns' values
            String hashChaveP2 = "";
            for (String chaveP2 : colunaChave) {
                Integer columIndex = -1;
                for (Header he2 : headerP2) {
                    if (he2.getColumnName().equals(chaveP2)) {
                        columIndex = he2.getColumnNumber();
                        break;
                    }
                }
                if (columIndex > -1) {
                    Cell cell = null;
                    cell = rowP2.getCell(columIndex, Row.CREATE_NULL_AS_BLANK);
                    hashChaveP2 = DigestUtils
                            .sha1Hex(cell.getStringCellValue().trim().toLowerCase() + hashChaveP2);
                }
            }
            // populate the interface object with every mapped column of this row
            for (Header he2 : headerP2) {
                Cell cell = rowP2.getCell(he2.getColumnNumber(), Row.CREATE_NULL_AS_BLANK);
                objInterfaceP2.setString(he2.getColumnName(), cell.getStringCellValue().trim().toLowerCase());
                objInterfaceP2.setExcelRowNumber((rowP2.getRowNum() + 1));
            }
            if (hashChaveP2.equals("")) {
                // all key columns were empty — the row cannot be keyed; abort
                JOptionPane.showMessageDialog(null, "A linha " + String.valueOf((rowP2.getRowNum() + 1))
                        + " da planilha 2 tem as colunas chaves nula");
                System.exit(0);
            } else
                listaP2.put(hashChaveP2, objInterfaceP2);
        }
    }
    // release workbook2
    try {
        if (workbook2 != null)
            workbook2.close();
    } catch (IOException ex) {
        Logger.getLogger(ThreadExcelImport.class.getName()).log(Level.SEVERE, null, ex);
    }
    // release the stream backing workbook2
    if (stream2 != null)
        try {
            stream2.close();
        } catch (IOException ex) {
            Logger.getLogger(ThreadExcelImport.class.getName()).log(Level.SEVERE, null, ex);
        }
    // ----- Phase 3: run the validation. The forward-only streaming reader was
    // exhausted by the header pass, so spreadsheet 1 must be re-streamed. -----
    stream1 = null;
    try {
        stream1 = new FileInputStream(new File(srcFileP1));
    } catch (FileNotFoundException ex) {
        Logger.getLogger(ThreadExcelImport.class.getName()).log(Level.SEVERE, null, ex);
    }
    workbook1 = StreamingReader.builder().rowCacheSize(100) // number of rows to keep in memory (defaults to 10)
            .bufferSize(4096) // buffer size to use when reading InputStream to file (defaults to 1024)
            .open(stream1);
    sheet1 = null;
    sheet1 = workbook1.getSheetAt(0);
    InterfaceMigracao objInterfaceP1 = null;
    for (Row rowP1 : sheet1) {
        // compute the hash of the key columns of spreadsheet 1 to locate the matching P2 row
        String hashChaveP1 = "";
        for (String chaveP1 : colunaChave) {
            Integer columIndex = -1;
            for (Header he1 : headerP1) {
                if (he1.getColumnName().equals(chaveP1)) {
                    columIndex = he1.getColumnNumber();
                    break;
                }
            }
            if (columIndex > -1) {
                Cell cell = null;
                cell = rowP1.getCell(columIndex, Row.CREATE_NULL_AS_BLANK);
                hashChaveP1 = DigestUtils.sha1Hex(cell.getStringCellValue().trim().toLowerCase() + hashChaveP1);
            }
        }
        objInterfaceP1 = Factory.getInstance(templateName);
        objInterfaceP1.setExcelRowNumber((rowP1.getRowNum() + 1));
        Notify notify = new Notify();
        if (hashChaveP1.equals(""))
            notify.setLocalizadoP1(false);
        else {
            notify.setLocalizadoP1(true);
            // populate the spreadsheet-1 interface object with this row's data
            for (Header he1 : headerP1) {
                Cell cell = null;
                cell = rowP1.getCell(he1.getColumnNumber(), Row.CREATE_NULL_AS_BLANK);
                objInterfaceP1.setString(he1.getColumnName(), cell.getStringCellValue().trim().toLowerCase());
            }
            boolean p2Localizado = false;
            // look up the matching spreadsheet-2 row by key hash (skip the header row)
            if (rowP1.getRowNum() > 0) {
                InterfaceMigracao objInterfaceMigracaoP2 = listaP2.get(hashChaveP1);
                if (objInterfaceMigracaoP2 != null) {
                    p2Localizado = true;
                    notify.setEntidadeP2(objInterfaceMigracaoP2);
                }
            }
            notify.setLocalizadoP2(p2Localizado);
        }
        isRunning = true;
        objInterfaceP1.setExcelRowNumber((rowP1.getRowNum() + 1));
        notify.setEntidadeP1(objInterfaceP1);
        notify.setTotalRow((sheet1.getLastRowNum() + 1));
        notify.setRunning(isRunning);
        notify.setHeaderP1(headerP1);
        notify.setHeaderP2(headerP2);
        setChanged();
        notifyObservers(notify);
    }
    isRunning = false;
    // notify observers that execution has finished
    Notify notify = new Notify();
    notify.setRunning(false);
    setChanged();
    notifyObservers(notify);
    listaP2 = null; // allow the in-memory copy of spreadsheet 2 to be garbage collected
}
From source file:edu.kit.dama.util.CryptUtil.java
/** * Convert the provided string to a SHA1 representation and return it as hex * string./*from ww w. jav a2 s .c o m*/ * * @param pString The plain string. * * @return The target string as SHA1toHex(SHA1(pString)). */ public static String stringToSHA1(String pString) { return DigestUtils.sha1Hex(DigestUtils.sha1(pString)); }
From source file:com.metamx.druid.client.cache.MemcachedCache.java
private static String computeKeyHash(String memcachedPrefix, NamedKey key) { // hash keys to keep things under 250 characters for memcached return memcachedPrefix + ":" + DigestUtils.sha1Hex(key.namespace) + ":" + DigestUtils.sha1Hex(key.key); }
From source file:eu.openanalytics.rsb.component.AdminResource.java
@Path("/" + SYSTEM_SUBPATH + "/r_packages") @POST//from w w w.j a v a2 s . com @Consumes({ Constants.GZIP_CONTENT_TYPE }) public void installRPackage(@QueryParam("rServiPoolUri") final String rServiPoolUri, @QueryParam("sha1hexsum") final String sha1HexSum, @QueryParam("packageName") final String packageName, final InputStream input) throws Exception { Validate.notBlank(rServiPoolUri, "missing query param: rServiPoolUri"); Validate.notBlank(sha1HexSum, "missing query param: sha1hexsum"); // store the package and tar files in temporary files final File tempDirectory = new File(FileUtils.getTempDirectory(), UUID.randomUUID().toString()); FileUtils.forceMkdir(tempDirectory); final File packageSourceFile = new File(tempDirectory, packageName); try { final FileOutputStream output = new FileOutputStream(packageSourceFile); IOUtils.copyLarge(input, output); IOUtils.closeQuietly(output); // validate the checksum final FileInputStream packageSourceInputStream = new FileInputStream(packageSourceFile); final String calculatedSha1HexSum = DigestUtils.sha1Hex(packageSourceInputStream); IOUtils.closeQuietly(packageSourceInputStream); Validate.isTrue(calculatedSha1HexSum.equals(sha1HexSum), "Invalid SHA-1 HEX checksum"); // upload to RServi rServiPackageManager.install(packageSourceFile, rServiPoolUri); // extract catalog files from $PKG_ROOT/inst/rsb/catalog extractCatalogFiles(packageSourceFile); getLogger().info("Package with checksum " + sha1HexSum + " installed to " + rServiPoolUri); } finally { try { FileUtils.forceDelete(tempDirectory); } catch (final Exception e) { getLogger().warn("Failed to delete temporary directory: " + tempDirectory, e); } } }
From source file:io.wcm.devops.maven.nodejsproxy.resource.MavenProxyResource.java
private Response getBinary(String url, String version, boolean getChecksum, String expectedChecksum) throws IOException { log.info("Proxy file: {}", url); HttpGet get = new HttpGet(url); HttpResponse response = httpClient.execute(get); if (response.getStatusLine().getStatusCode() == HttpServletResponse.SC_OK) { byte[] data = EntityUtils.toByteArray(response.getEntity()); // validate checksum if (expectedChecksum != null) { String remoteChecksum = DigestUtils.sha256Hex(data); if (!StringUtils.equals(expectedChecksum, remoteChecksum)) { log.warn("Reject file: {} - checksum comparison failed - expected: {}, actual: {}", url, expectedChecksum, remoteChecksum); return Response.status(Response.Status.NOT_FOUND).build(); }//from w ww . j ava2s .c om } if (getChecksum) { return Response.ok(DigestUtils.sha1Hex(data)).type(MediaType.TEXT_PLAIN).build(); } else { return Response.ok(data).type(MediaType.APPLICATION_OCTET_STREAM) .header(CONTENT_LENGTH, response.containsHeader(CONTENT_LENGTH) ? response.getFirstHeader(CONTENT_LENGTH).getValue() : null) .build(); } } else { EntityUtils.consumeQuietly(response.getEntity()); return Response.status(Response.Status.NOT_FOUND).build(); } }
From source file:net.morimekta.idltool.IdlUtils.java
public static Map<String, String> buildSha1Sums(Path dir) throws IOException { ImmutableSortedMap.Builder<String, String> sha1sums = ImmutableSortedMap.naturalOrder(); // TODO: Support nested directories. Files.list(dir).forEach(file -> { try {//from w w w. ja v a 2 s . co m if (Files.isRegularFile(file)) { String sha = DigestUtils.sha1Hex(Files.readAllBytes(file)); sha1sums.put(file.getFileName().toString(), sha); } } catch (IOException e) { throw new UncheckedIOException(e.getMessage(), e); } }); return sha1sums.build(); }
From source file:me.adaptive.core.data.api.UserRegistrationService.java
protected String getValidationHash(UserEntity user) { return DigestUtils.sha1Hex(user.getPasswordHash() + ':' + SystemSettingHolder.getSettingByKey(FORGOT_PASSWORD_SALT_KEY).get().getValue()); }
From source file:ddf.camel.component.catalog.content.ContentProducerDataAccessObjectTest.java
/**
 * Exercises createContentItem for all three watch-event kinds (create, modify,
 * delete) against a fully mocked catalog framework, verifying that each event
 * dispatches exactly the matching framework operation.
 */
@Test
public void testCreateContentItem() throws Exception {
    File testFile = temporaryFolder.newFile("testCreateContentItem.txt");
    // make sample list of metacard and set of keys
    List<MetacardImpl> metacardList = ImmutableList.of(new MetacardImpl());
    String uri = testFile.toURI().toASCIIString();
    // The DAO stores external references under sha1Hex(uri) — pre-seed that key.
    Set<String> keys = new HashSet<>(Collections.singletonList(String.valueOf(DigestUtils.sha1Hex(uri))));
    // mock out responses for create, delete, update
    CreateResponse mockCreateResponse = mock(CreateResponse.class);
    doReturn(metacardList).when(mockCreateResponse).getCreatedMetacards();
    DeleteResponse mockDeleteResponse = mock(DeleteResponse.class);
    doReturn(metacardList).when(mockDeleteResponse).getDeletedMetacards();
    UpdateResponse mockUpdateResponse = mock(UpdateResponse.class);
    doReturn(metacardList).when(mockUpdateResponse).getUpdatedMetacards();
    // setup mockFileSystemPersistenceProvider: key lookup succeeds, delete removes the key
    FileSystemPersistenceProvider mockFileSystemPersistenceProvider = mock(FileSystemPersistenceProvider.class);
    doReturn(keys).when(mockFileSystemPersistenceProvider).loadAllKeys();
    doAnswer(invocationOnMock -> keys.remove(invocationOnMock.getArguments()[0]))
            .when(mockFileSystemPersistenceProvider).delete(anyString());
    doReturn("sample").when(mockFileSystemPersistenceProvider).loadFromPersistence(any(String.class));
    // setup mockCatalogFramework
    CatalogFramework mockCatalogFramework = mock(CatalogFramework.class);
    doReturn(mockCreateResponse).when(mockCatalogFramework).create(any(CreateStorageRequest.class));
    doReturn(mockDeleteResponse).when(mockCatalogFramework).delete(any(DeleteRequest.class));
    // setup mockSourceInfo so waitForAvailableSource sees an available source
    SourceInfoResponse mockSourceInfoResponse = mock(SourceInfoResponse.class);
    SourceDescriptor mockSourceDescriptor = mock(SourceDescriptor.class);
    when(mockSourceDescriptor.isAvailable()).thenReturn(true);
    when(mockSourceInfoResponse.getSourceInfo()).thenReturn(Collections.singleton(mockSourceDescriptor));
    when(mockCatalogFramework.getSourceInfo(any(SourceInfoRequest.class))).thenReturn(mockSourceInfoResponse);
    // setup mockComponent
    ContentComponent mockComponent = mock(ContentComponent.class);
    doReturn(mockCatalogFramework).when(mockComponent).getCatalogFramework();
    // setup mockEndpoint
    ContentEndpoint mockEndpoint = mock(ContentEndpoint.class);
    doReturn(mockComponent).when(mockEndpoint).getComponent();
    WatchEvent.Kind<Path> kind;
    String mimeType = "txt";
    Map<String, Object> headers = new HashedMap();
    Map<String, Serializable> attributeOverrides = new HashMap<>();
    attributeOverrides.put("example", ImmutableList.of("something", "something1"));
    attributeOverrides.put("example2", ImmutableList.of("something2"));
    headers.put(Constants.ATTRIBUTE_OVERRIDES_KEY, attributeOverrides);
    headers.put(Constants.STORE_REFERENCE_KEY, uri);
    // ENTRY_CREATE dispatches a storage create
    kind = StandardWatchEventKinds.ENTRY_CREATE;
    contentProducerDataAccessObject.createContentItem(mockFileSystemPersistenceProvider, mockEndpoint, testFile,
            kind, mimeType, headers);
    verify(mockCatalogFramework).create(any(CreateStorageRequest.class));
    // ENTRY_MODIFY dispatches a storage update
    kind = StandardWatchEventKinds.ENTRY_MODIFY;
    contentProducerDataAccessObject.createContentItem(mockFileSystemPersistenceProvider, mockEndpoint, testFile,
            kind, mimeType, headers);
    verify(mockCatalogFramework).update(any(UpdateStorageRequest.class));
    // ENTRY_DELETE dispatches a delete (and removes the persisted key via doAnswer above)
    kind = StandardWatchEventKinds.ENTRY_DELETE;
    contentProducerDataAccessObject.createContentItem(mockFileSystemPersistenceProvider, mockEndpoint, testFile,
            kind, mimeType, headers);
    verify(mockCatalogFramework).delete(any(DeleteRequest.class));
    // Second delete after the key was removed: the DAO should short-circuit, so the
    // framework delete count stays at one (verify() asserts exactly-once by default).
    contentProducerDataAccessObject.createContentItem(mockFileSystemPersistenceProvider, mockEndpoint, testFile,
            kind, mimeType, headers);
    verify(mockCatalogFramework).delete(any(DeleteRequest.class));
}