List of usage examples for `org.apache.commons.codec.binary.StringUtils#getBytesUtf8(String)`
public static byte[] getBytesUtf8(final String string)
From source file:com.smartitengineering.cms.spi.impl.events.EventConsumerTest.java
@Test public void testContentConsumptionWithInvalidMessage() { mockery.checking(new Expectations() { {//from ww w . java 2s . c o m } }); EventConsumer consumer = injector.getInstance(EventConsumer.class); consumer.consume(MSG_TYPE, "CONTENT\nCREATE\n" + Base64.encodeBase64URLSafeString(StringUtils.getBytesUtf8("random string\nfor test"))); mockery.assertIsSatisfied(); }
From source file:mvm.rya.indexing.KeyParts.java
private static void appendPredicate(Statement statement, Text keyText) { Value statementValue = new Value(StringUtils.getBytesUtf8(StatementSerializer.writePredicate(statement))); byte[] hashOfValue = uniqueFromValueForKey(statementValue); appendBytes(HASH_PREFIX, keyText); // prefix the hash with a zero byte. appendBytes(hashOfValue, keyText);/*from w ww . j av a2 s . c o m*/ }
From source file:dashboard.ImportCSV.java
public void postCSVEventToMixpanel(String ip, String eventName, String eventTime, String buildNum) throws IOException { String pattern = "M/dd/yy h:mm a"; SimpleDateFormat sdf = new SimpleDateFormat(pattern); try {//from w w w .ja v a2s . c o m Date date = sdf.parse(eventTime); long timeInSecSinceEpoch = date.getTime() / 1000; t = timeInSecSinceEpoch; et = eventTime; JSONObject obj1 = new JSONObject(); obj1.put("distinct_id", ip); obj1.put("ip", ip); obj1.put("build", buildNum); obj1.put("time", timeInSecSinceEpoch); obj1.put("token", GIGA_PROJECT_TOKEN); JSONObject obj2 = new JSONObject(); obj2.put("event", eventName); obj2.put("properties", obj1); String s2 = obj2.toString(); String encodedJSON = Base64.encodeBase64String(StringUtils.getBytesUtf8(s2)); postRequest("http://api.mixpanel.com/import", "data", encodedJSON, "api_key", GIGA_API_KEY); } catch (Exception e) { throw new RuntimeException("Can't POST to Mixpanel.", e); } }
From source file:com.smartitengineering.cms.spi.impl.events.EventPublisherTest.java
/**
 * Builds the event message announcing creation of a content type: the literal
 * header "CONTENT_TYPE\nCREATE\n" followed by the URL-safe Base64 encoding of
 * the newline-delimited content-type id.
 */
public static String getContentTypeMsg() {
    final String rawId = new StringBuilder(WORSPACE_NS).append('\n').append(WORKSPACE_NAME).append('\n')
            .append(CONTENT_TYPE_NS).append('\n').append(CONTENT_TYPE_NAME).toString();
    final String encodedId = Base64.encodeBase64URLSafeString(StringUtils.getBytesUtf8(rawId));
    return "CONTENT_TYPE\nCREATE\n" + encodedId;
}
From source file:mvm.rya.indexing.KeyParts.java
private static void appendSubjectPredicate(Statement statement, Text keyText) { Value statementValue = new Value( StringUtils.getBytesUtf8(StatementSerializer.writeSubjectPredicate(statement))); byte[] hashOfValue = uniqueFromValueForKey(statementValue); appendBytes(HASH_PREFIX, keyText); // prefix the hash with a zero byte. appendBytes(hashOfValue, keyText);//from w w w . j a v a 2 s.co m }
From source file:com.smartitengineering.cms.spi.impl.events.EventConsumerTest.java
/**
 * Verifies that a CONTENT/CREATE message for a content that cannot be loaded
 * (its {@code getContent()} returns null) is consumed quietly: the workspace
 * and content ids are resolved exactly once, and no further processing occurs.
 */
@Test
public void testContentIgnorance() {
    mockery.checking(new Expectations() {
        {
            // NOTE: in jMock, will(...) attaches to the most recently declared
            // expectation, so the ordering of these statements is significant.
            exactly(1).of(workspaceApi).createWorkspaceId(EventPublisherTest.WORSPACE_NS,
                    EventPublisherTest.WORKSPACE_NAME);
            final WorkspaceId wId = mockery.mock(WorkspaceId.class);
            will(returnValue(wId));
            // The consumer must decode the message payload back to exactly the
            // bytes of the published content id.
            exactly(1).of(contentLoader).createContentId(with(wId), with(new BaseMatcher<byte[]>() {
                public boolean matches(Object item) {
                    return Arrays.equals(StringUtils.getBytesUtf8(EventPublisherTest.CONTENT_ID),
                            (byte[]) item);
                }

                public void describeTo(Description description) {
                }
            }));
            final ContentId contentId = mockery.mock(ContentId.class);
            will(returnValue(contentId));
            // Simulate a missing content: loading it yields null.
            exactly(1).of(contentId).getContent();
            will(returnValue(null));
        }
    });
    EventConsumer consumer = injector.getInstance(EventConsumer.class);
    consumer.consume(MSG_TYPE, EventPublisherTest.getContentMsg());
    mockery.assertIsSatisfied();
}
From source file:com.smartitengineering.cms.spi.impl.events.EventPublisherTest.java
/**
 * Builds the event message announcing creation of a sequence: the literal
 * header "SEQUENCE\nCREATE\n" followed by the URL-safe Base64 encoding of the
 * newline-delimited sequence id.
 */
public static String getSequenceMsg() {
    final StringBuilder rawId = new StringBuilder();
    rawId.append(WORSPACE_NS).append('\n').append(WORKSPACE_NAME).append('\n').append(SEQUENCE_NAME);
    final String encodedId = Base64.encodeBase64URLSafeString(StringUtils.getBytesUtf8(rawId.toString()));
    return "SEQUENCE\nCREATE\n" + encodedId;
}
From source file:hws.core.JobClient.java
/**
 * Submits a Hadoop-Watershed job to the YARN cluster and blocks until it finishes.
 *
 * <p>Flow: parse CLI options, register the application under
 * {@code /hadoop-watershed} in ZooKeeper, upload the module pipeline's files to
 * HDFS, launch the ApplicationMaster ({@code hws.core.JobMaster}) with the
 * pipeline serialized as whitespace-free Base64 JSON, then poll YARN until the
 * application reaches a terminal state and delete the application's znode.
 *
 * @param args command-line arguments; supports {@code -zks} (ZooKeeper servers)
 *             and {@code --load} (module XML files)
 * @throws Exception on any parsing, HDFS, ZooKeeper or YARN failure
 */
public void run(String[] args) throws Exception {
    //final String command = args[0];
    //final int n = Integer.valueOf(args[1]);
    //final Path jarPath = new Path(args[2]);

    // ---- Command-line option definitions ----
    Options options = new Options();
    /*options.addOption(OptionBuilder.withLongOpt("jar")
            .withDescription( "Jar path" )
            .hasArg()
            .withArgName("JarPath")
            .create());
    options.addOption(OptionBuilder.withLongOpt("scheduler")
            .withDescription( "Scheduler class name" )
            .hasArg()
            .withArgName("ClassName")
            .create());
    */
    options.addOption(OptionBuilder.withLongOpt("zk-servers")
            .withDescription("List of the ZooKeeper servers").hasArgs().withArgName("zkAddrs").create("zks"));
    //options.addOption("l", "list", false, "list modules");
    options.addOption(OptionBuilder.withLongOpt("load").withDescription("load new modules").hasArgs()
            .withArgName("XMLFiles").create());
    /*options.addOption(OptionBuilder.withLongOpt( "remove" )
            .withDescription( "remove modules" )
            .hasArgs()
            .withArgName("ModuleNames")
            .create("rm"));
    */
    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(options, args);

    //Path jarPath = null;
    //String schedulerClassName = null;
    String[] xmlFileNames = null;
    //String []moduleNames = null;

    // Collect the ZooKeeper servers both as an array (used locally) and as a
    // "-zks ..." argument string forwarded to the ApplicationMaster command.
    String zksArgs = "";
    String[] zkServers = null;
    if (cmd.hasOption("zks")) {
        zksArgs = "-zks";
        zkServers = cmd.getOptionValues("zks");
        for (String zks : zkServers) {
            zksArgs += " " + zks;
        }
    }
    // NOTE(review): if -zks is absent, zkServers stays null and
    // "new ZkClient(zkServers[0])" below throws NPE — confirm -zks is mandatory.

    //Logger setup
    //FSDataOutputStream writer = FileSystem.get(conf).create(new Path("hdfs:///hws/apps/"+appIdStr+"/logs/jobClient.log"));
    //Logger.addOutputStream(writer);

    /*if(cmd.hasOption("l")){
        LOG.warn("Argument --list (-l) is not supported yet.");
    }
    if(cmd.hasOption("jar")){
        jarPath = new Path(cmd.getOptionValue("jar"));
    }
    if(cmd.hasOption("scheduler")){
        schedulerClassName = cmd.getOptionValue("scheduler");
    }*/
    if (cmd.hasOption("load")) {
        xmlFileNames = cmd.getOptionValues("load");
    } /*else if(cmd.hasOption("rm")){
        moduleNames = cmd.getOptionValues("rm");
    }*/

    //LOG.info("Jar-Path "+jarPath);
    if (xmlFileNames != null) {
        String paths = "";
        for (String path : xmlFileNames) {
            paths += path + "; ";
        }
        LOG.info("Load XMLs: " + paths);
    }
    /*if(moduleNames!=null){
        String modules = "";
        for(String module: moduleNames){
            modules += module+"; ";
        }
        LOG.info("remove: "+modules);
    }*/

    // Create yarnClient
    YarnConfiguration conf = new YarnConfiguration();
    YarnClient yarnClient = YarnClient.createYarnClient();
    yarnClient.init(conf);
    yarnClient.start();

    // Create application via yarnClient
    YarnClientApplication app = yarnClient.createApplication();
    System.out.println("LOG Path: " + ApplicationConstants.LOG_DIR_EXPANSION_VAR);

    // Set up the container launch context for the application master
    ContainerLaunchContext amContainer = Records.newRecord(ContainerLaunchContext.class);
    ApplicationSubmissionContext appContext = app.getApplicationSubmissionContext();
    ApplicationId appId = appContext.getApplicationId();

    // Register the application in ZooKeeper so the ApplicationMaster can later
    // signal completion via the ".../done" znode awaited below.
    ZkClient zk = new ZkClient(zkServers[0]); //TODO select a ZooKeeper server
    if (!zk.exists("/hadoop-watershed")) {
        zk.createPersistent("/hadoop-watershed", "");
    }
    zk.createPersistent("/hadoop-watershed/" + appId.toString(), "");

    // Stage the module pipeline's files into this application's HDFS folder.
    FileSystem fs = FileSystem.get(conf);
    LOG.info("Collecting files to upload");
    fs.mkdirs(new Path("hdfs:///hws/apps/" + appId.toString()));
    fs.mkdirs(new Path("hdfs:///hws/apps/" + appId.toString() + "/logs"));
    ModulePipeline modulePipeline = ModulePipeline.fromXMLFiles(xmlFileNames);
    LOG.info("Uploading files to HDFS");
    for (String path : modulePipeline.files()) {
        uploadFile(fs, new File(path), appId);
    }
    LOG.info("Upload finished");

    // Serialize the pipeline as Base64-encoded JSON; whitespace is stripped so
    // the value survives being passed as a single shell argument.
    String modulePipelineJson = Json.dumps(modulePipeline);
    String modulePipelineBase64 = Base64.encodeBase64String(StringUtils.getBytesUtf8(modulePipelineJson))
            .replaceAll("\\s", "");
    LOG.info("ModulePipeline: " + modulePipelineJson);
    //LOG.info("ModulePipeline: "+modulePipelineBase64);
    amContainer.setCommands(Collections.singletonList("$JAVA_HOME/bin/java" + " -Xmx256M"
            + " hws.core.JobMaster" + " -aid " + appId.toString() + " --load " + modulePipelineBase64 + " "
            + zksArgs + " 1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stdout" + " 2>"
            + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stderr"));

    // Setup jar for ApplicationMaster
    //LocalResource appMasterJar = Records.newRecord(LocalResource.class);
    //setupAppMasterJar(jarPath, appMasterJar);
    //amContainer.setLocalResources(Collections.singletonMap("hws.jar", appMasterJar));

    // Every file under hdfs:///hws/bin/ becomes a LocalResource of the AM container.
    LOG.info("Listing files for YARN-Watershed");
    RemoteIterator<LocatedFileStatus> filesIterator = fs.listFiles(new Path("hdfs:///hws/bin/"), false);
    Map<String, LocalResource> resources = new HashMap<String, LocalResource>();
    LOG.info("Files setup as resource");
    while (filesIterator.hasNext()) {
        LocatedFileStatus fileStatus = filesIterator.next();
        // Setup jar for ApplicationMaster
        LocalResource containerJar = Records.newRecord(LocalResource.class);
        ContainerUtils.setupContainerJar(fs, fileStatus.getPath(), containerJar);
        resources.put(fileStatus.getPath().getName(), containerJar);
    }
    LOG.info("container resource setup");
    amContainer.setLocalResources(resources);
    // NOTE(review): FileSystem.get(conf) typically returns a cached, shared
    // instance; closing it here may break other users of the same FileSystem — confirm.
    fs.close(); //closing FileSystem interface

    // Setup CLASSPATH for ApplicationMaster
    Map<String, String> appMasterEnv = new HashMap<String, String>();
    ContainerUtils.setupContainerEnv(appMasterEnv, conf);
    amContainer.setEnvironment(appMasterEnv);

    // Set up resource type requirements for ApplicationMaster
    Resource capability = Records.newRecord(Resource.class);
    capability.setMemory(256);
    capability.setVirtualCores(1);

    // Finally, set-up ApplicationSubmissionContext for the application
    //ApplicationSubmissionContext appContext =
    //app.getApplicationSubmissionContext();
    appContext.setApplicationName("Hadoop-Watershed"); // application name
    appContext.setAMContainerSpec(amContainer);
    appContext.setResource(capability);
    appContext.setQueue("default"); // queue

    // Submit application
    LOG.info("Submitting application " + appId);
    yarnClient.submitApplication(appContext);

    // Block until the AM creates the "done" znode, then poll YARN until the
    // application reaches a terminal state (FINISHED/KILLED/FAILED).
    LOG.info("Waiting for containers to finish");
    zk.waitUntilExists("/hadoop-watershed/" + appId.toString() + "/done", TimeUnit.MILLISECONDS, 250);
    ApplicationReport appReport = yarnClient.getApplicationReport(appId);
    YarnApplicationState appState = appReport.getYarnApplicationState();
    while (appState != YarnApplicationState.FINISHED && appState != YarnApplicationState.KILLED
            && appState != YarnApplicationState.FAILED) {
        Thread.sleep(100);
        appReport = yarnClient.getApplicationReport(appId);
        appState = appReport.getYarnApplicationState();
    }
    System.out.println("Application " + appId + " finished with" + " state " + appState + " at "
            + appReport.getFinishTime());

    // Remove this application's state from ZooKeeper.
    System.out.println("deleting " + appId.toString() + " znode");
    zk.deleteRecursive("/hadoop-watershed/" + appId.toString()); //TODO remove app folder from ZooKeeper
}
From source file:es.uah.cc.ie.utils.DrushUpdater.java
/**
 * Encodes a string as Base64 using its UTF-8 byte representation.
 *
 * @param s the text to encode
 * @return the Base64-encoded form of {@code s}
 */
private String encode(String s) {
    final byte[] utf8Bytes = StringUtils.getBytesUtf8(s);
    return Base64.encodeBase64String(utf8Bytes);
}
From source file:com.aoppp.gatewaysdk.internal.hw.DigestUtils2.java
/**
 * Calculates the MD5 digest of a string and returns it as a 16-byte array.
 *
 * @param data text to digest; converted to bytes using
 *             {@link StringUtils#getBytesUtf8(String)}
 * @return the 16-element MD5 digest
 */
public static byte[] md5(final String data) {
    final byte[] utf8 = StringUtils.getBytesUtf8(data);
    return md5(utf8);
}