List of usage examples for com.fasterxml.jackson.databind ObjectMapper writerWithDefaultPrettyPrinter
public ObjectWriter writerWithDefaultPrettyPrinter()
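Before the source-file examples, a minimal self-contained sketch (the Config POJO below is illustrative, not taken from any example on this page): writerWithDefaultPrettyPrinter() returns an ObjectWriter that serializes with Jackson's DefaultPrettyPrinter, producing indented, human-readable output.

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;

public class PrettyPrintDemo {

    // Hypothetical POJO used only to have something to serialize.
    static class Config {
        public String name = "demo";
        public int retries = 3;
    }

    public static void main(String[] args) throws JsonProcessingException {
        ObjectMapper mapper = new ObjectMapper();
        // Returns an ObjectWriter configured with Jackson's DefaultPrettyPrinter.
        ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
        System.out.println(writer.writeValueAsString(new Config()));
    }
}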
From source file:org.apache.pulsar.testclient.PerformanceProducer.java
public static void main(String[] args) throws Exception {
    final Arguments arguments = new Arguments();
    JCommander jc = new JCommander(arguments);
    jc.setProgramName("pulsar-perf-producer");

    try {
        jc.parse(args);
    } catch (ParameterException e) {
        System.out.println(e.getMessage());
        jc.usage();
        System.exit(-1);
    }

    if (arguments.help) {
        jc.usage();
        System.exit(-1);
    }

    if (arguments.destinations.size() != 1) {
        System.out.println("Only one topic name is allowed");
        jc.usage();
        System.exit(-1);
    }

    if (arguments.confFile != null) {
        Properties prop = new Properties(System.getProperties());
        prop.load(new FileInputStream(arguments.confFile));

        if (arguments.serviceURL == null) {
            arguments.serviceURL = prop.getProperty("brokerServiceUrl");
        }

        if (arguments.serviceURL == null) {
            arguments.serviceURL = prop.getProperty("webServiceUrl");
        }

        // fallback to previous-version serviceUrl property to maintain backward-compatibility
        if (arguments.serviceURL == null) {
            arguments.serviceURL = prop.getProperty("serviceUrl", "http://localhost:8080/");
        }

        if (arguments.authPluginClassName == null) {
            arguments.authPluginClassName = prop.getProperty("authPlugin", null);
        }

        if (arguments.authParams == null) {
            arguments.authParams = prop.getProperty("authParams", null);
        }
    }

    arguments.testTime = TimeUnit.SECONDS.toMillis(arguments.testTime);

    // Dump config variables
    ObjectMapper m = new ObjectMapper();
    ObjectWriter w = m.writerWithDefaultPrettyPrinter();
    log.info("Starting Pulsar perf producer with config: {}", w.writeValueAsString(arguments));

    // Read payload data from file if needed
    byte payloadData[];
    if (arguments.payloadFilename != null) {
        payloadData = Files.readAllBytes(Paths.get(arguments.payloadFilename));
    } else {
        payloadData = new byte[arguments.msgSize];
    }

    // Now processing command line arguments
    String prefixTopicName = arguments.destinations.get(0);
    List<Future<Producer>> futures = Lists.newArrayList();

    EventLoopGroup eventLoopGroup;
    if (SystemUtils.IS_OS_LINUX) {
        eventLoopGroup = new EpollEventLoopGroup(Runtime.getRuntime().availableProcessors(),
                new DefaultThreadFactory("pulsar-perf-producer"));
    } else {
        eventLoopGroup = new NioEventLoopGroup(Runtime.getRuntime().availableProcessors(),
                new DefaultThreadFactory("pulsar-perf-producer"));
    }

    ClientConfiguration clientConf = new ClientConfiguration();
    clientConf.setConnectionsPerBroker(arguments.maxConnections);
    clientConf.setStatsInterval(arguments.statsIntervalSeconds, TimeUnit.SECONDS);
    if (isNotBlank(arguments.authPluginClassName)) {
        clientConf.setAuthentication(arguments.authPluginClassName, arguments.authParams);
    }
    PulsarClient client = new PulsarClientImpl(arguments.serviceURL, clientConf, eventLoopGroup);

    ProducerConfiguration producerConf = new ProducerConfiguration();
    producerConf.setSendTimeout(0, TimeUnit.SECONDS);
    producerConf.setCompressionType(arguments.compression);
    // enable round robin message routing if it is a partitioned topic
    producerConf.setMessageRoutingMode(MessageRoutingMode.RoundRobinPartition);
    if (arguments.batchTime > 0) {
        producerConf.setBatchingMaxPublishDelay(arguments.batchTime, TimeUnit.MILLISECONDS);
        producerConf.setBatchingEnabled(true);
        producerConf.setMaxPendingMessages(arguments.msgRate);
    }

    // Block if queue is full else we will start seeing errors in sendAsync
    producerConf.setBlockIfQueueFull(true);

    for (int i = 0; i < arguments.numTopics; i++) {
        String topic = (arguments.numTopics == 1) ? prefixTopicName
                : String.format("%s-%d", prefixTopicName, i);
        log.info("Adding {} publishers on destination {}", arguments.numProducers, topic);

        for (int j = 0; j < arguments.numProducers; j++) {
            futures.add(client.createProducerAsync(topic, producerConf));
        }
    }

    final List<Producer> producers = Lists.newArrayListWithCapacity(futures.size());
    for (Future<Producer> future : futures) {
        producers.add(future.get());
    }

    log.info("Created {} producers", producers.size());

    Runtime.getRuntime().addShutdownHook(new Thread() {
        public void run() {
            printAggregatedStats();
        }
    });

    Collections.shuffle(producers);
    AtomicBoolean isDone = new AtomicBoolean();

    executor.submit(() -> {
        try {
            RateLimiter rateLimiter = RateLimiter.create(arguments.msgRate);
            long startTime = System.currentTimeMillis();

            // Send messages on all topics/producers
            long totalSent = 0;
            while (true) {
                for (Producer producer : producers) {
                    if (arguments.testTime > 0) {
                        if (System.currentTimeMillis() - startTime > arguments.testTime) {
                            log.info("------------------- DONE -----------------------");
                            printAggregatedStats();
                            isDone.set(true);
                            Thread.sleep(5000);
                            System.exit(0);
                        }
                    }

                    if (arguments.numMessages > 0) {
                        if (totalSent++ >= arguments.numMessages) {
                            log.info("------------------- DONE -----------------------");
                            printAggregatedStats();
                            isDone.set(true);
                            Thread.sleep(5000);
                            System.exit(0);
                        }
                    }

                    rateLimiter.acquire();

                    final long sendTime = System.nanoTime();

                    producer.sendAsync(payloadData).thenRun(() -> {
                        messagesSent.increment();
                        bytesSent.add(payloadData.length);

                        long latencyMicros = NANOSECONDS.toMicros(System.nanoTime() - sendTime);
                        recorder.recordValue(latencyMicros);
                        cumulativeRecorder.recordValue(latencyMicros);
                    }).exceptionally(ex -> {
                        log.warn("Write error on message", ex);
                        System.exit(-1);
                        return null;
                    });
                }
            }
        } catch (Throwable t) {
            log.error("Got error", t);
        }
    });

    // Print report stats
    long oldTime = System.nanoTime();

    Histogram reportHistogram = null;

    String statsFileName = "perf-producer-" + System.currentTimeMillis() + ".hgrm";
    log.info("Dumping latency stats to {}", statsFileName);

    PrintStream histogramLog = new PrintStream(new FileOutputStream(statsFileName), false);
    HistogramLogWriter histogramLogWriter = new HistogramLogWriter(histogramLog);

    // Some log header bits
    histogramLogWriter.outputLogFormatVersion();
    histogramLogWriter.outputLegend();

    while (true) {
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            break;
        }

        if (isDone.get()) {
            break;
        }

        long now = System.nanoTime();
        double elapsed = (now - oldTime) / 1e9;

        double rate = messagesSent.sumThenReset() / elapsed;
        double throughput = bytesSent.sumThenReset() / elapsed / 1024 / 1024 * 8;

        reportHistogram = recorder.getIntervalHistogram(reportHistogram);

        log.info(
                "Throughput produced: {} msg/s --- {} Mbit/s --- Latency: mean: {} ms - med: {} - 95pct: {} - 99pct: {} - 99.9pct: {} - 99.99pct: {} - Max: {}",
                throughputFormat.format(rate), throughputFormat.format(throughput),
                dec.format(reportHistogram.getMean() / 1000.0),
                dec.format(reportHistogram.getValueAtPercentile(50) / 1000.0),
                dec.format(reportHistogram.getValueAtPercentile(95) / 1000.0),
                dec.format(reportHistogram.getValueAtPercentile(99) / 1000.0),
                dec.format(reportHistogram.getValueAtPercentile(99.9) / 1000.0),
                dec.format(reportHistogram.getValueAtPercentile(99.99) / 1000.0),
                dec.format(reportHistogram.getMaxValue() / 1000.0));

        histogramLogWriter.outputIntervalHistogram(reportHistogram);

        reportHistogram.reset();
        oldTime = now;
    }

    client.close();
}
From source file:rpex.hadoop.Main.java
public static void main(String... args) throws Exception {
    LOGGER.debug("STARTING...");
    ObjectMapper objectMapper = new ObjectMapper();
    RatpackServer.start(spec -> spec
            .serverConfig(builder -> builder.baseDir(BaseDir.find("application.properties"))
                    .props(Main.class.getClassLoader().getResource("application.properties")).env()
                    .sysProps().require("/hadoop", MapReduceConfig.class))
            .registry(Guice.registry(bindingsSpec -> {
                bindingsSpec.bindInstance(ResponseTimer.decorator()).module(MapReduceModule.class,
                        config -> config.copyOf(bindingsSpec.getServerConfig().get(MapReduceConfig.class)));
                Jackson.Init.register(bindingsSpec, objectMapper,
                        objectMapper.writerWithDefaultPrettyPrinter());
            }))
            .handlers(chain -> chain.all(new Handler() {
                @Override
                public void handle(Context ctx) throws Exception {
                    MDC.put("clientIP", ctx.getRequest().getRemoteAddress().getHostText());
                    RequestId.Generator generator = ctx.maybeGet(RequestId.Generator.class)
                            .orElse(UuidBasedRequestIdGenerator.INSTANCE);
                    RequestId requestId = generator.generate(ctx);
                    ctx.getRequest().add(RequestId.class, requestId);
                    MDC.put("requestId", requestId.getId());
                    LOGGER.debug("ALL CALLED");
                    ctx.next();
                }
            }).prefix("v1", chain1 -> chain1.get("api-def", ctx -> {
                LOGGER.debug("GET API_DEF.JSON");
                ctx.render(ctx.file("public/apidef/api-def.json"));
            }).prefix("mr", MapReduceEndpoints.class))));
}
From source file:edu.ucsd.crbs.cws.App.java
public static void main(String[] args) {
    Job.REFS_ENABLED = false;
    Workflow.REFS_ENABLED = false;
    try {
        OptionParser parser = new OptionParser() {
            {
                accepts(UPLOAD_WF_ARG, "Add/Update Workflow").withRequiredArg().ofType(File.class)
                        .describedAs("Kepler .kar file");
                //accepts(LOAD_TEST,"creates lots of workflows and jobs");
                accepts(SYNC_WITH_CLUSTER_ARG,
                        "Submits & Synchronizes Workflow Jobs on local cluster with CRBS Workflow Webservice. Requires --"
                                + PROJECT_ARG + " --" + PORTALNAME_ARG + " --" + PORTAL_URL_ARG + " --"
                                + HELP_EMAIL_ARG).withRequiredArg().ofType(String.class).describedAs("URL");
                accepts(GEN_OLD_KEPLER_XML_ARG, "Generates version 1.x kepler xml for given workflow")
                        .withRequiredArg().ofType(String.class).describedAs("wfid or .kar file");
                accepts(UPLOAD_FILE_ARG, "Registers and uploads Workspace file to REST service")
                        .withRequiredArg().ofType(File.class);
                accepts(REGISTER_FILE_ARG,
                        "Registers Workspace file to REST service (DOES NOT UPLOAD FILE TO REST SERVICE)")
                        .withRequiredArg().ofType(File.class);
                accepts(GET_WORKSPACE_FILE_INFO_ARG, "Outputs JSON of specified workspace file(s)")
                        .withRequiredArg().ofType(String.class).describedAs("workspace file id");
                accepts(GET_WORKFLOW_ARG, "Outputs JSON of specified Workflow").withRequiredArg()
                        .ofType(Long.class).describedAs("Workflow Id");
                accepts(DOWNLOAD_FILE_ARG, "Downloads Workspace file").withRequiredArg().ofType(String.class)
                        .describedAs("workspace file id");
                accepts(UPDATE_PATH_ARG, "Updates Workspace file path").withRequiredArg().ofType(String.class)
                        .describedAs("workspace file id");
                accepts(PATH_ARG, "Sets WorkspaceFile file path. Used in coordination with --" + UPDATE_PATH_ARG)
                        .withRequiredArg().ofType(String.class).describedAs("file path");
                accepts(URL_ARG, "URL to use with --" + UPLOAD_WF_ARG + ", --" + UPLOAD_FILE_ARG + ", --"
                        + GET_WORKSPACE_FILE_INFO_ARG + " flags").withRequiredArg().ofType(String.class)
                        .describedAs("URL");
                accepts(EXAMPLE_JSON_ARG, "Outputs example JSON of Job, User, Workflow, and WorkspaceFile objects");
                accepts(WF_EXEC_DIR_ARG, "Workflow Execution Directory").withRequiredArg().ofType(File.class)
                        .describedAs("Directory");
                accepts(WF_DIR_ARG, "Workflows Directory").withRequiredArg().ofType(File.class)
                        .describedAs("Directory");
                accepts(KEPLER_SCRIPT_ARG, "Kepler").withRequiredArg().ofType(File.class).describedAs("Script");
                accepts(QUEUE_ARG, "SGE Queue").withRequiredArg().ofType(String.class).describedAs("Queue");
                accepts(CAST_ARG, "Panfishcast binary").withRequiredArg().ofType(File.class)
                        .describedAs("panfishcast");
                accepts(STAT_ARG, "Panfishstat binary").withRequiredArg().ofType(File.class)
                        .describedAs("panfishstat");
                accepts(LOGIN_ARG, "User Login").withRequiredArg().ofType(String.class).describedAs("username");
                accepts(TOKEN_ARG, "User Token").withRequiredArg().ofType(String.class).describedAs("token");
                accepts(RUN_AS_ARG, "User to run as (for power accounts that can run as other users)")
                        .withRequiredArg().ofType(String.class).describedAs("runas");
                accepts(OWNER_ARG, "Sets owner when creating Workspace file and Workflow").withRequiredArg()
                        .ofType(String.class).describedAs("username");
                accepts(JOB_ID_ARG, "Sets source job id for Workspace file when used with --" + UPLOAD_FILE_ARG
                        + " and --" + REGISTER_FILE_ARG).withRequiredArg().ofType(Long.class)
                        .describedAs("Job Id");
                accepts(MD5_ARG, "Sets md5 for Workspace file when used with --" + UPLOAD_FILE_ARG + " and --"
                        + REGISTER_FILE_ARG).withRequiredArg().ofType(String.class)
                        .describedAs("MD5 message digest");
                accepts(SIZE_ARG, "Sets size in bytes for Workspace file when used with --" + UPLOAD_FILE_ARG
                        + " and --" + REGISTER_FILE_ARG).withRequiredArg().ofType(Long.class)
                        .describedAs("Size of file/dir in bytes");
                accepts(RESAVE_WORKSPACEFILE_ARG, "Resaves Workspace file").withRequiredArg().ofType(Long.class)
                        .describedAs("WorkspaceFile Id or -1 to resave all");
                accepts(RESAVE_JOB_ARG, "Resaves Job").withRequiredArg().ofType(Long.class)
                        .describedAs("Job Id or -1 to resave all");
                accepts(RESAVE_WORKFLOW_ARG, "Resaves Workflow").withRequiredArg().ofType(Long.class)
                        .describedAs("Workflow Id or -1 to resave all");
                accepts(PREVIEW_WORKFLOW_ARG, "Preview Workflow on Web, requires --" + URL_ARG
                        + " currently it should be: http://imafish.dynamic.ucsd.edu/cws/makepreview")
                        .withRequiredArg().ofType(File.class).describedAs("Kepler .kar file");
                accepts(DESCRIPTION_ARG, "Description for WorkspaceFile").withRequiredArg().ofType(String.class);
                accepts(TYPE_ARG, "Type of WorkspaceFile").withRequiredArg().ofType(String.class);
                accepts(NAME_ARG, "Sets name for Workspace file when used with --" + UPLOAD_FILE_ARG + " and --"
                        + REGISTER_FILE_ARG).withRequiredArg().ofType(String.class)
                        .describedAs("WorkspaceFile name");
                accepts(REGISTER_JAR_ARG, "Path to Jar to register WorkspaceFiles").withRequiredArg()
                        .ofType(File.class).describedAs("Path to this jar");
                accepts(GET_JOB_ARG, "Gets job from service in JSON format, requires --" + URL_ARG)
                        .withRequiredArg().ofType(Long.class).describedAs("Job Id");
                accepts(GET_WORKSPACE_FILE_ARG, "Gets WorkspaceFile from service in JSON format, requires --" + URL_ARG)
                        .withRequiredArg().ofType(Long.class).describedAs("WorkspaceFile Id or -1 for all");
                accepts(PROJECT_ARG, "Project name ie CRBS. Used with --" + SYNC_WITH_CLUSTER_ARG)
                        .withRequiredArg().ofType(String.class);
                accepts(PORTALNAME_ARG, "Portal name ie SLASH portal Used with --" + SYNC_WITH_CLUSTER_ARG)
                        .withRequiredArg().ofType(String.class);
                accepts(PORTAL_URL_ARG, "Portal url ie http://slashsegmentation.com Used with --" + SYNC_WITH_CLUSTER_ARG)
                        .withRequiredArg().ofType(String.class);
                accepts(HELP_EMAIL_ARG, "Help and reply to email address Used with --" + SYNC_WITH_CLUSTER_ARG)
                        .withRequiredArg().ofType(String.class);
                accepts(BCC_EMAIL_ARG, "Blind Carbon copy email address Used with --" + SYNC_WITH_CLUSTER_ARG)
                        .withRequiredArg().ofType(String.class);
                accepts(WORKSPACE_FILE_FAILED_ARG, "Denotes whether workspacefile failed (true) or not (false). Used with --"
                        + UPDATE_PATH_ARG).withRequiredArg().ofType(Boolean.class)
                        .describedAs("false = success and true = failed");
                accepts(ERROR_EMAIL_ARG, "Email to receive notifications if errors are encountered. Used with --"
                        + SYNC_WITH_CLUSTER_ARG).withRequiredArg().ofType(String.class);
                accepts(HELP_ARG).forHelp();
            }
        };

        OptionSet optionSet = null;
        try {
            optionSet = parser.parse(args);
        } catch (OptionException oe) {
            System.err.println("\nThere was an error parsing arguments: " + oe.getMessage() + "\n\n");
            parser.printHelpOn(System.err);
            System.exit(1);
        }

        if (optionSet.has(HELP_ARG)
                || (!optionSet.has(SYNC_WITH_CLUSTER_ARG) && !optionSet.has(UPLOAD_WF_ARG))
                        && !optionSet.has(EXAMPLE_JSON_ARG) && !optionSet.has(UPLOAD_FILE_ARG)
                        && !optionSet.has(GET_WORKSPACE_FILE_INFO_ARG) && !optionSet.has(UPDATE_PATH_ARG)
                        && !optionSet.has(REGISTER_FILE_ARG) && !optionSet.has(RESAVE_WORKSPACEFILE_ARG)
                        && !optionSet.has(RESAVE_JOB_ARG) && !optionSet.has(RESAVE_WORKFLOW_ARG)
                        && !optionSet.has(PREVIEW_WORKFLOW_ARG) && !optionSet.has(GEN_OLD_KEPLER_XML_ARG)
                        && !optionSet.has(GET_JOB_ARG) && !optionSet.has(GET_WORKSPACE_FILE_ARG)
                        && !optionSet.has(GET_WORKFLOW_ARG)) {
            System.out.println(PROGRAM_HELP + "\n");
            parser.printHelpOn(System.out);
            System.exit(0);
        }

        if (optionSet.has(EXAMPLE_JSON_ARG)) {
            renderExampleWorkflowsAndTasksAsJson();
            System.exit(0);
        }

        if (optionSet.has(GET_JOB_ARG)) {
            failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + GET_JOB_ARG + " flag");
            getJobAsJson(optionSet);
            System.exit(0);
        }

        if (optionSet.has(GET_WORKSPACE_FILE_ARG)) {
            failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + GET_WORKSPACE_FILE_ARG + " flag");
            getWorkspaceFileAsJson(optionSet);
            System.exit(0);
        }

        if (optionSet.has(GET_WORKFLOW_ARG)) {
            failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + GET_WORKFLOW_ARG + " flag");
            getWorkflowAsJson(optionSet);
            System.exit(0);
        }

        MultivaluedMapFactory multivaluedMapFactory = new MultivaluedMapFactoryImpl();

        if (optionSet.has(GEN_OLD_KEPLER_XML_ARG)) {
            String workflowFileOrId = (String) optionSet.valueOf(GEN_OLD_KEPLER_XML_ARG);
            File workflowFile = new File(workflowFileOrId);
            Workflow w = null;
            // if value is a file attempt to load it as a workflow file
            if (workflowFile.exists() && workflowFile.isFile()) {
                w = getWorkflowFromFile(workflowFile);
                if (w == null) {
                    throw new Exception("Unable to extract workflow from file: " + workflowFile);
                }
            } else {
                // assume the value is a workflow id and get it from the service
                // but fail if url is missing
                failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + GEN_OLD_KEPLER_XML_ARG + " flag");
                User u = getUserFromOptionSet(optionSet);
                WorkflowRestDAOImpl workflowDAO = new WorkflowRestDAOImpl();
                workflowDAO.setRestURL((String) optionSet.valueOf(URL_ARG));
                workflowDAO.setUser(u);
                w = workflowDAO.getWorkflowById(workflowFileOrId, u);
                if (w == null) {
                    throw new Exception("Unable to extract workflow from id: " + workflowFileOrId);
                }
            }
            VersionOneWorkflowXmlWriter xmlWriter = new VersionOneWorkflowXmlWriter();
            StringWriter sw = new StringWriter();
            xmlWriter.write(sw, w);
            System.out.println(sw.toString());
            System.exit(0);
        }

        if (optionSet.has(PREVIEW_WORKFLOW_ARG)) {
            failIfOptionSetMissingURL(optionSet, "--" + PREVIEW_WORKFLOW_ARG + " flag");
            File workflowFile = (File) optionSet.valueOf(PREVIEW_WORKFLOW_ARG);
            Workflow w = getWorkflowFromFile(workflowFile);
            if (w == null) {
                throw new Exception("Unable to extract workflow from file");
            }
            uploadPreviewWorkflowFile((String) optionSet.valueOf(URL_ARG), w);
            System.exit(0);
        }

        if (optionSet.has(REGISTER_FILE_ARG)) {
            addNewWorkspaceFile(optionSet, false, REGISTER_FILE_ARG);
            System.exit(0);
        }

        if (optionSet.has(RESAVE_WORKSPACEFILE_ARG)) {
            failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + RESAVE_WORKSPACEFILE_ARG + " flag");
            WorkspaceFileRestDAOImpl workspaceFileDAO = new WorkspaceFileRestDAOImpl();
            User u = getUserFromOptionSet(optionSet);
            workspaceFileDAO.setUser(u);
            workspaceFileDAO.setRestURL((String) optionSet.valueOf(URL_ARG));
            Long workspaceId = (Long) optionSet.valueOf(RESAVE_WORKSPACEFILE_ARG);
            if (workspaceId == -1) {
                System.out.println("Resaving all workspace files");
                List<WorkspaceFile> wsfList = workspaceFileDAO.getWorkspaceFiles(null, null, null, null, null);
                if (wsfList != null) {
                    System.out.println("Found " + wsfList.size() + " workspace files to resave");
                    for (WorkspaceFile wsf : wsfList) {
                        System.out.println("WorkspaceFile Id: " + wsf.getId());
                        workspaceFileDAO.resave(wsf.getId());
                    }
                }
            } else {
                workspaceFileDAO.resave(workspaceId);
            }
            System.exit(0);
        }

        if (optionSet.has(RESAVE_JOB_ARG)) {
            failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + RESAVE_JOB_ARG + " flag");
            JobRestDAOImpl jobDAO = new JobRestDAOImpl();
            User u = getUserFromOptionSet(optionSet);
            jobDAO.setUser(u);
            jobDAO.setRestURL((String) optionSet.valueOf(URL_ARG));
            Long jobId = (Long) optionSet.valueOf(RESAVE_JOB_ARG);
            if (jobId == -1) {
                System.out.println("Resaving all jobs");
                List<Job> jobList = jobDAO.getJobs(null, null, null, true, true, Boolean.TRUE);
                if (jobList != null) {
                    System.out.println("Found " + jobList.size() + " jobs to resave");
                    for (Job j : jobList) {
                        System.out.println("job id: " + j.getId());
                        jobDAO.resave(j.getId());
                    }
                }
            } else {
                jobDAO.resave(jobId);
            }
            System.exit(0);
        }

        if (optionSet.has(RESAVE_WORKFLOW_ARG)) {
            failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + RESAVE_WORKFLOW_ARG + " flag");
            WorkflowRestDAOImpl workflowDAO = new WorkflowRestDAOImpl();
            User u = getUserFromOptionSet(optionSet);
            workflowDAO.setUser(u);
            workflowDAO.setRestURL((String) optionSet.valueOf(URL_ARG));
            Long workflowId = (Long) optionSet.valueOf(RESAVE_WORKFLOW_ARG);
            if (workflowId == -1) {
                System.out.println("Resaving all workflows");
                List<Workflow> workflowList = workflowDAO.getAllWorkflows(true, Boolean.TRUE);
                if (workflowList != null) {
                    System.out.println("Found " + workflowList.size() + " workflow(s) to resave");
                    for (Workflow w : workflowList) {
                        System.out.println("workflow id: " + w.getId());
                        workflowDAO.resave(w.getId());
                    }
                }
            } else {
                workflowDAO.resave(workflowId);
            }
            System.exit(0);
        }

        if (optionSet.has(UPDATE_PATH_ARG)) {
            failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + UPDATE_PATH_ARG + " flag");
            User u = getUserFromOptionSet(optionSet);
            String workspaceId = (String) optionSet.valueOf(UPDATE_PATH_ARG);
            String path = null;
            if (optionSet.has(PATH_ARG)) {
                path = (String) optionSet.valueOf(PATH_ARG);
            }
            String size = null;
            if (optionSet.has(SIZE_ARG)) {
                size = ((Long) optionSet.valueOf(SIZE_ARG)).toString();
            }
            if (optionSet.has(MD5_ARG)) {
                //wsp.setMd5((String)optionSet.valueOf(MD5_ARG));
            }
            Boolean isFailed = null;
            if (optionSet.has(WORKSPACE_FILE_FAILED_ARG)) {
                isFailed = (Boolean) optionSet.valueOf(WORKSPACE_FILE_FAILED_ARG);
            }
            WorkspaceFileRestDAOImpl workspaceFileDAO = new WorkspaceFileRestDAOImpl();
            workspaceFileDAO.setUser(u);
            workspaceFileDAO.setRestURL((String) optionSet.valueOf(URL_ARG));
            workspaceFileDAO.updatePathSizeAndFailStatus(Long.parseLong(workspaceId), path, size, isFailed);
            System.exit(0);
        }

        if (optionSet.has(SYNC_WITH_CLUSTER_ARG)) {
            // @TODO NEED TO MAKE JOPT DO THIS REQUIRED FLAG CHECKING STUFF
            if (!optionSet.has(WF_EXEC_DIR_ARG)) {
                System.err.println("-" + WF_EXEC_DIR_ARG + " is required with -" + SYNC_WITH_CLUSTER_ARG + " flag");
                System.exit(2);
            }
            if (!optionSet.has(WF_DIR_ARG)) {
                System.err.println("-" + WF_DIR_ARG + " is required with -" + SYNC_WITH_CLUSTER_ARG + " flag");
                System.exit(3);
            }
            if (!optionSet.has(KEPLER_SCRIPT_ARG)) {
                System.err.println("-" + KEPLER_SCRIPT_ARG + " is required with -" + SYNC_WITH_CLUSTER_ARG + " flag");
                System.exit(4);
            }
            if (!optionSet.has(CAST_ARG)) {
                System.err.println("-" + CAST_ARG + " is required with -" + SYNC_WITH_CLUSTER_ARG + " flag");
                System.exit(5);
            }
            if (!optionSet.has(STAT_ARG)) {
                System.err.println("-" + STAT_ARG + " is required with -" + SYNC_WITH_CLUSTER_ARG + " flag");
                System.exit(6);
            }
            if (!optionSet.has(QUEUE_ARG)) {
                System.err.println("-" + QUEUE_ARG + " is required with -" + SYNC_WITH_CLUSTER_ARG + " flag");
                System.exit(7);
            }
            if (!optionSet.has(REGISTER_JAR_ARG)) {
                System.err.println("-" + REGISTER_JAR_ARG + " is required with -" + SYNC_WITH_CLUSTER_ARG + " flag");
                System.exit(8);
            }
            failIfOptionSetMissingLoginOrToken(optionSet, "--" + SYNC_WITH_CLUSTER_ARG + " flag");

            File castFile = (File) optionSet.valueOf(CAST_ARG);
            String castPath = castFile.getAbsolutePath();
            File statFile = (File) optionSet.valueOf(STAT_ARG);
            String statPath = statFile.getAbsolutePath();
            String queue = (String) optionSet.valueOf(QUEUE_ARG);
            File wfExecDir = (File) optionSet.valueOf(WF_EXEC_DIR_ARG);
            File wfDir = (File) optionSet.valueOf(WF_DIR_ARG);
            File keplerScript = (File) optionSet.valueOf(KEPLER_SCRIPT_ARG);

            String registerJar = null;
            if (optionSet.has(REGISTER_JAR_ARG)) {
                File registerJarFile = (File) optionSet.valueOf(REGISTER_JAR_ARG);
                registerJar = registerJarFile.getAbsolutePath();
            }
            JobEmailNotificationData emailNotifyData = getJobEmailNotificationData(optionSet);
            User u = getUserFromOptionSet(optionSet);
            ObjectifyService.ofy();
            String url = (String) optionSet.valueOf(SYNC_WITH_CLUSTER_ARG);
            JobRestDAOImpl jobDAO = new JobRestDAOImpl();
            jobDAO.setRestURL(url);
            jobDAO.setUser(u);
            System.out.println("Running sync with cluster");
            WorkspaceFileRestDAOImpl workspaceFileDAO = new WorkspaceFileRestDAOImpl();
            workspaceFileDAO.setRestURL(url);
            workspaceFileDAO.setUser(u);
            JobPath jobPath = new JobPathImpl(wfExecDir.getAbsolutePath());
            WorkspaceFilePathSetterImpl pathSetter = new WorkspaceFilePathSetterImpl(workspaceFileDAO);

            // Submit jobs to scheduler
            JobSubmissionManager submitter = new JobSubmissionManager(jobDAO, workspaceFileDAO, pathSetter,
                    jobPath, wfDir.getAbsolutePath(), keplerScript.getAbsolutePath(), castPath, queue, u, url,
                    registerJar, emailNotifyData);
            submitter.submitJobs();

            // Update job status for all jobs in system
            MapOfJobStatusFactoryImpl jobStatusFactory = new MapOfJobStatusFactoryImpl(statPath);
            WorkflowFailedParser workflowFailedParser = new WorkflowFailedParserImpl();
            JobStatusUpdater updater = new JobStatusUpdater(jobDAO, jobStatusFactory, workflowFailedParser, jobPath);
            updater.updateJobs();
            System.exit(0);
        }

        if (optionSet.has(App.GET_WORKSPACE_FILE_INFO_ARG)) {
            failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + GET_WORKSPACE_FILE_INFO_ARG + " flag");
            WorkspaceFileRestDAOImpl workspaceFileDAO = new WorkspaceFileRestDAOImpl();
            workspaceFileDAO.setRestURL((String) optionSet.valueOf(URL_ARG));
            List<WorkspaceFile> wsFiles = workspaceFileDAO
                    .getWorkspaceFilesById((String) optionSet.valueOf(GET_WORKSPACE_FILE_INFO_ARG), null);
            if (wsFiles != null) {
                ObjectMapper om = new ObjectMapper();
                ObjectWriter ow = om.writerWithDefaultPrettyPrinter();
                System.out.print("[");
                boolean first = true;
                for (WorkspaceFile wsf : wsFiles) {
                    if (first == false) {
                        System.out.println(",");
                    } else {
                        first = false;
                    }
                    System.out.print(ow.writeValueAsString(wsf));
                }
                System.out.println("]");
            } else {
                System.err.println("[]");
            }
            System.exit(0);
        }

        if (optionSet.has(UPLOAD_FILE_ARG)) {
            addNewWorkspaceFile(optionSet, true, UPLOAD_FILE_ARG);
            System.exit(0);
        }

        if (optionSet.has(UPLOAD_WF_ARG)) {
            Long parentWfId = null;
            String postURL = null;
            if (optionSet.has(URL_ARG)) {
                postURL = (String) optionSet.valueOf(URL_ARG);
                failIfOptionSetMissingLoginOrToken(optionSet, "--" + UPLOAD_WF_ARG + " and --" + URL_ARG + " flag");
            }
            File workflowFile = (File) optionSet.valueOf(UPLOAD_WF_ARG);
            Workflow w = getWorkflowFromFile(workflowFile);
            if (w != null) {
                if (optionSet.has(OWNER_ARG)) {
                    w.setOwner((String) optionSet.valueOf(OWNER_ARG));
                }
                ObjectMapper om = new ObjectMapper();
                if (parentWfId != null) {
                    w.setId(parentWfId);
                }
                if (postURL == null) {
                    System.out.println("\n--- JSON Representation of Workflow ---");
                    ObjectWriter ow = om.writerWithDefaultPrettyPrinter();
                    System.out.println(ow.writeValueAsString(w));
                    System.out.flush();
                    System.out.println("---------------------------------------");
                } else {
                    postURL = new StringBuilder().append(postURL).append(Constants.SLASH)
                            .append(Constants.REST_PATH).append(Constants.SLASH)
                            .append(Constants.WORKFLOWS_PATH).toString();
                    ClientConfig cc = new DefaultClientConfig();
                    cc.getClasses().add(StringProvider.class);
                    cc.getClasses().add(MultiPartWriter.class);
                    Client client = Client.create(cc);
                    client.setFollowRedirects(true);
                    WebResource resource = client.resource(postURL);
                    String workflowAsJson = om.writeValueAsString(w);
                    User u = getUserFromOptionSet(optionSet);
                    client.addFilter(new HTTPBasicAuthFilter(u.getLogin(), u.getToken()));
                    MultivaluedMap queryParams = multivaluedMapFactory.getMultivaluedMap(u);
                    String response = resource.queryParams(queryParams).type(MediaType.APPLICATION_JSON_TYPE)
                            .entity(workflowAsJson).post(String.class);
                    Workflow workflowRes = om.readValue(response, Workflow.class);
                    ObjectWriter ow = om.writerWithDefaultPrettyPrinter();
                    if (workflowRes.getWorkflowFileUploadURL() == null) {
                        throw new Exception("No upload url found for workflow!!!" + ow.writeValueAsString(workflowRes));
                    }
                    uploadWorkflowFile(workflowRes, workflowFile);
                }
            }
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        System.err.println("Caught Exception: " + ex.getMessage());
        System.exit(2);
    }
    System.exit(0);
}
From source file:org.bimserver.javamodelchecker.WriteToJson.java
public static void main(String[] args) {
    try {
        ObjectMapper objectMapper = new ObjectMapper();
        ObjectNode rootNode = objectMapper.createObjectNode();
        ArrayNode array = objectMapper.createArrayNode();
        rootNode.set("modelcheckers", array);

        ObjectNode objectNode = objectMapper.createObjectNode();
        array.add(objectNode);
        objectNode.put("name", "Check window widths");
        objectNode.put("description", "Check window widths");
        objectNode.put("modelCheckerPluginClassName", "org.bimserver.javamodelchecker.JavaModelCheckerPlugin");
        objectNode.put("code", changeClassName(
                FileUtils.readFileToString(new File("src/org/bimserver/javamodelchecker/WindowWidthChecker.java")),
                "WindowWidthChecker"));

        objectNode = objectMapper.createObjectNode();
        array.add(objectNode);
        objectNode.put("name", "Pass always");
        objectNode.put("description", "Pass always");
        objectNode.put("modelCheckerPluginClassName", "org.bimserver.javamodelchecker.JavaModelCheckerPlugin");
        objectNode.put("code", changeClassName(
                FileUtils.readFileToString(new File("src/org/bimserver/javamodelchecker/PassAlways.java")),
                "PassAlways"));

        objectNode = objectMapper.createObjectNode();
        array.add(objectNode);
        objectNode.put("name", "Fail always");
        objectNode.put("description", "Fail always");
        objectNode.put("modelCheckerPluginClassName", "org.bimserver.javamodelchecker.JavaModelCheckerPlugin");
        objectNode.put("code", changeClassName(
                FileUtils.readFileToString(new File("src/org/bimserver/javamodelchecker/FailAlways.java")),
                "FailAlways"));

        objectMapper.writerWithDefaultPrettyPrinter().writeValue(new File("modelcheckers.json"), rootNode);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
From source file:org.wildfly.sample.http.management.client.WildflyCassandraJaxrsClient.java
private static String prettyPrintJson(String jsonString) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    return mapper.writerWithDefaultPrettyPrinter()
            .writeValueAsString(mapper.readValue(jsonString, Object.class));
}
From source file:com.github.tomakehurst.wiremock.common.Json.java
public static <T> String write(T object) {
    try {
        ObjectMapper mapper = new ObjectMapper();
        return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(object);
    } catch (IOException ioe) {
        throw new RuntimeException("Unable to generate JSON from object. Reason: " + ioe.getMessage(), ioe);
    }
}
From source file:com.jivesoftware.sdk.util.JiveSDKUtils.java
public static String getJson(Object object) {
    ObjectMapper mapper = new ObjectMapper();
    try {
        return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(object);
    } catch (IOException ioe) {
        log.error("Unknown Error", ioe);
    } // end try/catch
    return null;
}
From source file:com.gtcgroup.justify.rest.helper.JstResponseUtilHelper.java
/**
 * @param responseContext
 * @return {@link String}
 */
public static String formatResponseEntity(final ContainerResponseContext responseContext) {
    String message = null;
    final ObjectMapper mapper = new ObjectMapper();
    try {
        message = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(responseContext.getEntity());
    } catch (final Exception e) {
        throw new TestingRuntimeException(e);
    }
    return message;
}
From source file:m.omarh.liferay.resources.importer.generator.util.JSONUtil.java
public static String beautify(String json) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    Object obj = mapper.readValue(json, Object.class);
    return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(obj);
}
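Usage note (the sample input is illustrative, not from the source): because beautify round-trips the string through a generic Object, the result is the same JSON re-serialized, so beautify("{\"a\":1}") returns the object re-indented by Jackson's DefaultPrettyPrinter. Key order and number formatting may change slightly in the round trip.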
From source file:com.mesosphere.sdk.config.SerializationUtils.java
/**
 * Returns a representation of the provided value using the provided custom object mapper.
 */
public static <T> String toString(T value, ObjectMapper mapper) throws IOException {
    return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(value);
}
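A brief usage sketch for the helper above. Only the toString(value, mapper) signature comes from the snippet; the demo class, the Map payload, and the mapper configuration are assumptions for illustration.

import java.io.IOException;
import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.mesosphere.sdk.config.SerializationUtils;

public class ToStringDemo {
    public static void main(String[] args) throws IOException {
        // Hypothetical payload; any Jackson-serializable value works here.
        Map<String, Object> value = Map.of("name", "scheduler", "replicas", 3);

        // ORDER_MAP_ENTRIES_BY_KEYS makes the pretty-printed output deterministic,
        // which is useful when diffing serialized configs.
        ObjectMapper mapper = new ObjectMapper()
                .enable(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS);

        System.out.println(SerializationUtils.toString(value, mapper));
    }
}

Passing the mapper in, rather than creating one per call, lets callers reuse a single configured (and relatively expensive to construct) ObjectMapper across serializations.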