List of usage examples for com.fasterxml.jackson.databind ObjectMapper writeValueAsString
@SuppressWarnings("resource") public String writeValueAsString(Object value) throws JsonProcessingException
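Before the examples pulled from real projects below, here is a minimal, self-contained sketch of the call itself. The Person class is hypothetical and exists only for illustration:

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

public class WriteValueAsStringExample {
    // Hypothetical value type used only for this sketch.
    public static class Person {
        public String name = "Ada";
        public int age = 36;
    }

    public static void main(String[] args) throws JsonProcessingException {
        ObjectMapper mapper = new ObjectMapper();
        // Serializes any bean, Map, or collection to a JSON String in one call.
        String json = mapper.writeValueAsString(new Person());
        System.out.println(json); // typically prints {"name":"Ada","age":36}
    }
}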
From source file:HelloSmartsheet.java
public static void main(String[] args) {
    HttpURLConnection connection = null;
    StringBuilder response = new StringBuilder();
    // We are using the Jackson JSON parser to deserialize the JSON. See http://wiki.fasterxml.com/JacksonHome
    // Feel free to use whichever library you prefer.
    ObjectMapper mapper = new ObjectMapper();
    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    try {
        System.out.println("STARTING HelloSmartsheet...");
        // Create a BufferedReader to read user input.
        BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
        System.out.print("Enter Smartsheet API access token:");
        String accessToken = in.readLine();
        System.out.println("Fetching list of your sheets...");
        // Create a connection and fetch the list of sheets
        connection = (HttpURLConnection) new URL(GET_SHEETS_URL).openConnection();
        connection.addRequestProperty("Authorization", "Bearer " + accessToken);
        BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
        String line;
        // Read the response line by line.
        while ((line = reader.readLine()) != null) {
            response.append(line);
        }
        reader.close();
        // Use Jackson to convert the JSON string to a List of Sheets
        List<Sheet> sheets = mapper.readValue(response.toString(), new TypeReference<List<Sheet>>() {
        });
        if (sheets.size() == 0) {
            System.out.println("You don't have any sheets. Goodbye!");
            return;
        }
        System.out.println("Total sheets: " + sheets.size());
        int i = 1;
        for (Sheet sheet : sheets) {
            System.out.println(i++ + ": " + sheet.name);
        }
        System.out.print("Enter the number of the sheet you want to share: ");
        // Prompt the user to provide the sheet number, the email address, and the access level
        Integer sheetNumber = Integer.parseInt(in.readLine().trim());
        // NOTE: for simplicity, error handling and input validation are neglected.
        Sheet chosenSheet = sheets.get(sheetNumber - 1);
        System.out.print("Enter an email address to share " + chosenSheet.getName() + " to: ");
        String email = in.readLine();
        System.out.print("Choose an access level (VIEWER, EDITOR, EDITOR_SHARE, ADMIN) for " + email + ": ");
        String accessLevel = in.readLine();
        // Create a share object
        Share share = new Share();
        share.setEmail(email);
        share.setAccessLevel(accessLevel);
        System.out.println("Sharing " + chosenSheet.name + " to " + email + " as " + accessLevel + ".");
        // Create a connection. Note the SHARE_SHEET_URL uses /sheet as opposed to /sheets (with an 's')
        connection = (HttpURLConnection) new URL(SHARE_SHEET_URL.replace(SHEET_ID, "" + chosenSheet.getId()))
                .openConnection();
        connection.setDoOutput(true);
        connection.addRequestProperty("Authorization", "Bearer " + accessToken);
        connection.addRequestProperty("Content-Type", "application/json");
        OutputStreamWriter writer = new OutputStreamWriter(connection.getOutputStream());
        // Serialize the Share object
        writer.write(mapper.writeValueAsString(share));
        writer.close();
        // Read the response and parse the JSON
        reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
        response = new StringBuilder();
        while ((line = reader.readLine()) != null) {
            response.append(line);
        }
        Result result = mapper.readValue(response.toString(), Result.class);
        System.out.println("Sheet shared successfully, share ID " + result.result.id);
        System.out.println("Press any key to quit.");
        in.read();
    } catch (IOException e) {
        BufferedReader reader = new BufferedReader(
                new InputStreamReader(((HttpURLConnection) connection).getErrorStream()));
        String line;
        try {
            response = new StringBuilder();
            while ((line = reader.readLine()) != null) {
                response.append(line);
            }
            reader.close();
            Result result = mapper.readValue(response.toString(), Result.class);
            System.out.println(result.message);
        } catch (IOException e1) {
            e1.printStackTrace();
        }
    } catch (Exception e) {
        System.out.println("Something broke: " + e.getMessage());
        e.printStackTrace();
    }
}
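The part of this example that exercises writeValueAsString is the POST body: the Share bean is serialized to a String and written to the connection's output stream. Below is a trimmed sketch of just that step, with try-with-resources and an explicit UTF-8 charset; the Share bean, mapper, accessToken, and a shareUrl standing in for the SHARE_SHEET_URL expression are assumed from the example above:

// Sketch only: share, mapper, accessToken, and shareUrl come from the example above.
HttpURLConnection post = (HttpURLConnection) new URL(shareUrl).openConnection();
post.setDoOutput(true);
post.addRequestProperty("Authorization", "Bearer " + accessToken);
post.addRequestProperty("Content-Type", "application/json");
try (OutputStreamWriter writer =
        new OutputStreamWriter(post.getOutputStream(), StandardCharsets.UTF_8)) {
    // writeValueAsString produces the entire JSON request body in one call.
    writer.write(mapper.writeValueAsString(share));
}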
From source file:edu.ucsd.crbs.cws.App.java
public static void main(String[] args) {
    Job.REFS_ENABLED = false;
    Workflow.REFS_ENABLED = false;
    try {
        OptionParser parser = new OptionParser() {
            {
                accepts(UPLOAD_WF_ARG, "Add/Update Workflow").withRequiredArg().ofType(File.class)
                        .describedAs("Kepler .kar file");
                //accepts(LOAD_TEST,"creates lots of workflows and jobs");
                accepts(SYNC_WITH_CLUSTER_ARG,
                        "Submits & Synchronizes Workflow Jobs on local cluster with CRBS Workflow Webservice. Requires --"
                                + PROJECT_ARG + " --" + PORTALNAME_ARG + " --" + PORTAL_URL_ARG + " --"
                                + HELP_EMAIL_ARG).withRequiredArg().ofType(String.class).describedAs("URL");
                accepts(GEN_OLD_KEPLER_XML_ARG, "Generates version 1.x kepler xml for given workflow")
                        .withRequiredArg().ofType(String.class).describedAs("wfid or .kar file");
                accepts(UPLOAD_FILE_ARG, "Registers and uploads Workspace file to REST service")
                        .withRequiredArg().ofType(File.class);
                accepts(REGISTER_FILE_ARG,
                        "Registers Workspace file to REST service (DOES NOT UPLOAD FILE TO REST SERVICE)")
                        .withRequiredArg().ofType(File.class);
                accepts(GET_WORKSPACE_FILE_INFO_ARG, "Outputs JSON of specified workspace file(s)")
                        .withRequiredArg().ofType(String.class).describedAs("workspace file id");
                accepts(GET_WORKFLOW_ARG, "Outputs JSON of specified Workflow").withRequiredArg()
                        .ofType(Long.class).describedAs("Workflow Id");
                accepts(DOWNLOAD_FILE_ARG, "Downloads Workspace file").withRequiredArg().ofType(String.class)
                        .describedAs("workspace file id");
                accepts(UPDATE_PATH_ARG, "Updates Workspace file path").withRequiredArg().ofType(String.class)
                        .describedAs("workspace file id");
                accepts(PATH_ARG, "Sets WorkspaceFile file path. Used in coordination with --" + UPDATE_PATH_ARG)
                        .withRequiredArg().ofType(String.class).describedAs("file path");
                accepts(URL_ARG, "URL to use with --" + UPLOAD_WF_ARG + ", --" + UPLOAD_FILE_ARG + ", --"
                        + GET_WORKSPACE_FILE_INFO_ARG + " flags").withRequiredArg().ofType(String.class)
                        .describedAs("URL");
                accepts(EXAMPLE_JSON_ARG,
                        "Outputs example JSON of Job, User, Workflow, and WorkspaceFile objects");
                accepts(WF_EXEC_DIR_ARG, "Workflow Execution Directory").withRequiredArg().ofType(File.class)
                        .describedAs("Directory");
                accepts(WF_DIR_ARG, "Workflows Directory").withRequiredArg().ofType(File.class)
                        .describedAs("Directory");
                accepts(KEPLER_SCRIPT_ARG, "Kepler").withRequiredArg().ofType(File.class).describedAs("Script");
                accepts(QUEUE_ARG, "SGE Queue").withRequiredArg().ofType(String.class).describedAs("Queue");
                accepts(CAST_ARG, "Panfishcast binary").withRequiredArg().ofType(File.class)
                        .describedAs("panfishcast");
                accepts(STAT_ARG, "Panfishstat binary").withRequiredArg().ofType(File.class)
                        .describedAs("panfishstat");
                accepts(LOGIN_ARG, "User Login").withRequiredArg().ofType(String.class).describedAs("username");
                accepts(TOKEN_ARG, "User Token").withRequiredArg().ofType(String.class).describedAs("token");
                accepts(RUN_AS_ARG, "User to run as (for power accounts that can run as other users)")
                        .withRequiredArg().ofType(String.class).describedAs("runas");
                accepts(OWNER_ARG, "Sets owner when creating Workspace file and Workflow").withRequiredArg()
                        .ofType(String.class).describedAs("username");
                accepts(JOB_ID_ARG, "Sets source job id for Workspace file when used with --" + UPLOAD_FILE_ARG
                        + " and --" + REGISTER_FILE_ARG).withRequiredArg().ofType(Long.class)
                        .describedAs("Job Id");
                accepts(MD5_ARG, "Sets md5 for Workspace file when used with --" + UPLOAD_FILE_ARG
                        + " and --" + REGISTER_FILE_ARG).withRequiredArg().ofType(String.class)
                        .describedAs("MD5 message digest");
                accepts(SIZE_ARG, "Sets size in bytes for Workspace file when used with --" + UPLOAD_FILE_ARG
                        + " and --" + REGISTER_FILE_ARG).withRequiredArg().ofType(Long.class)
                        .describedAs("Size of file/dir in bytes");
                accepts(RESAVE_WORKSPACEFILE_ARG, "Resaves Workspace file").withRequiredArg().ofType(Long.class)
                        .describedAs("WorkspaceFile Id or -1 to resave all");
                accepts(RESAVE_JOB_ARG, "Resaves Job").withRequiredArg().ofType(Long.class)
                        .describedAs("Job Id or -1 to resave all");
                accepts(RESAVE_WORKFLOW_ARG, "Resaves Workflow").withRequiredArg().ofType(Long.class)
                        .describedAs("Workflow Id or -1 to resave all");
                accepts(PREVIEW_WORKFLOW_ARG, "Preview Workflow on Web, requires --" + URL_ARG
                        + " currently it should be: http://imafish.dynamic.ucsd.edu/cws/makepreview")
                        .withRequiredArg().ofType(File.class).describedAs("Kepler .kar file");
                accepts(DESCRIPTION_ARG, "Description for WorkspaceFile").withRequiredArg()
                        .ofType(String.class);
                accepts(TYPE_ARG, "Type of WorkspaceFile").withRequiredArg().ofType(String.class);
                accepts(NAME_ARG, "Sets name for Workspace file when used with --" + UPLOAD_FILE_ARG
                        + " and --" + REGISTER_FILE_ARG).withRequiredArg().ofType(String.class)
                        .describedAs("WorkspaceFile name");
                accepts(REGISTER_JAR_ARG, "Path to Jar to register WorkspaceFiles").withRequiredArg()
                        .ofType(File.class).describedAs("Path to this jar");
                accepts(GET_JOB_ARG, "Gets job from service in JSON format, requires --" + URL_ARG)
                        .withRequiredArg().ofType(Long.class).describedAs("Job Id");
                accepts(GET_WORKSPACE_FILE_ARG,
                        "Gets WorkspaceFile from service in JSON format, requires --" + URL_ARG)
                        .withRequiredArg().ofType(Long.class).describedAs("WorkspaceFile Id or -1 for all");
                accepts(PROJECT_ARG, "Project name ie CRBS. Used with --" + SYNC_WITH_CLUSTER_ARG)
                        .withRequiredArg().ofType(String.class);
                accepts(PORTALNAME_ARG, "Portal name ie SLASH portal Used with --" + SYNC_WITH_CLUSTER_ARG)
                        .withRequiredArg().ofType(String.class);
                accepts(PORTAL_URL_ARG,
                        "Portal url ie http://slashsegmentation.com Used with --" + SYNC_WITH_CLUSTER_ARG)
                        .withRequiredArg().ofType(String.class);
                accepts(HELP_EMAIL_ARG, "Help and reply to email address Used with --" + SYNC_WITH_CLUSTER_ARG)
                        .withRequiredArg().ofType(String.class);
                accepts(BCC_EMAIL_ARG, "Blind Carbon copy email address Used with --" + SYNC_WITH_CLUSTER_ARG)
                        .withRequiredArg().ofType(String.class);
                accepts(WORKSPACE_FILE_FAILED_ARG,
                        "Denotes whether workspacefile failed (true) or not (false). Used with --"
                                + UPDATE_PATH_ARG).withRequiredArg().ofType(Boolean.class)
                        .describedAs("false = success and true = failed");
                accepts(ERROR_EMAIL_ARG, "Email to receive notifications if errors are encountered. Used with --"
                        + SYNC_WITH_CLUSTER_ARG).withRequiredArg().ofType(String.class);
                accepts(HELP_ARG).forHelp();
            }
        };
        OptionSet optionSet = null;
        try {
            optionSet = parser.parse(args);
        } catch (OptionException oe) {
            System.err.println("\nThere was an error parsing arguments: " + oe.getMessage() + "\n\n");
            parser.printHelpOn(System.err);
            System.exit(1);
        }
        if (optionSet.has(HELP_ARG)
                || (!optionSet.has(SYNC_WITH_CLUSTER_ARG) && !optionSet.has(UPLOAD_WF_ARG))
                        && !optionSet.has(EXAMPLE_JSON_ARG) && !optionSet.has(UPLOAD_FILE_ARG)
                        && !optionSet.has(GET_WORKSPACE_FILE_INFO_ARG) && !optionSet.has(UPDATE_PATH_ARG)
                        && !optionSet.has(REGISTER_FILE_ARG) && !optionSet.has(RESAVE_WORKSPACEFILE_ARG)
                        && !optionSet.has(RESAVE_JOB_ARG) && !optionSet.has(RESAVE_WORKFLOW_ARG)
                        && !optionSet.has(PREVIEW_WORKFLOW_ARG) && !optionSet.has(GEN_OLD_KEPLER_XML_ARG)
                        && !optionSet.has(GET_JOB_ARG) && !optionSet.has(GET_WORKSPACE_FILE_ARG)
                        && !optionSet.has(GET_WORKFLOW_ARG)) {
            System.out.println(PROGRAM_HELP + "\n");
            parser.printHelpOn(System.out);
            System.exit(0);
        }
        if (optionSet.has(EXAMPLE_JSON_ARG)) {
            renderExampleWorkflowsAndTasksAsJson();
            System.exit(0);
        }
        if (optionSet.has(GET_JOB_ARG)) {
            failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + GET_JOB_ARG + " flag");
            getJobAsJson(optionSet);
            System.exit(0);
        }
        if (optionSet.has(GET_WORKSPACE_FILE_ARG)) {
            failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + GET_WORKSPACE_FILE_ARG + " flag");
            getWorkspaceFileAsJson(optionSet);
            System.exit(0);
        }
        if (optionSet.has(GET_WORKFLOW_ARG)) {
            failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + GET_WORKFLOW_ARG + " flag");
            getWorkflowAsJson(optionSet);
            System.exit(0);
        }
        MultivaluedMapFactory multivaluedMapFactory = new MultivaluedMapFactoryImpl();
        if (optionSet.has(GEN_OLD_KEPLER_XML_ARG)) {
            String workflowFileOrId = (String) optionSet.valueOf(GEN_OLD_KEPLER_XML_ARG);
            File workflowFile = new File(workflowFileOrId);
            Workflow w = null;
            // if value is a file attempt to load it as a workflow file
            if (workflowFile.exists() && workflowFile.isFile()) {
                w = getWorkflowFromFile(workflowFile);
                if (w == null) {
                    throw new Exception("Unable to extract workflow from file: " + workflowFile);
                }
            } else {
                // assume the value is a workflow id and get it from the service
                // but fail if url is missing
                failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + GEN_OLD_KEPLER_XML_ARG + " flag");
                User u = getUserFromOptionSet(optionSet);
                WorkflowRestDAOImpl workflowDAO = new WorkflowRestDAOImpl();
                workflowDAO.setRestURL((String) optionSet.valueOf(URL_ARG));
                workflowDAO.setUser(u);
                w = workflowDAO.getWorkflowById(workflowFileOrId, u);
                if (w == null) {
                    throw new Exception("Unable to extract workflow from id: " + workflowFileOrId);
                }
            }
            VersionOneWorkflowXmlWriter xmlWriter = new VersionOneWorkflowXmlWriter();
            StringWriter sw = new StringWriter();
            xmlWriter.write(sw, w);
            System.out.println(sw.toString());
            System.exit(0);
        }
        if (optionSet.has(PREVIEW_WORKFLOW_ARG)) {
            failIfOptionSetMissingURL(optionSet, "--" + PREVIEW_WORKFLOW_ARG + " flag");
            File workflowFile = (File) optionSet.valueOf(PREVIEW_WORKFLOW_ARG);
            Workflow w = getWorkflowFromFile(workflowFile);
            if (w == null) {
                throw new Exception("Unable to extract workflow from file");
            }
            uploadPreviewWorkflowFile((String) optionSet.valueOf(URL_ARG), w);
            System.exit(0);
        }
        if (optionSet.has(REGISTER_FILE_ARG)) {
            addNewWorkspaceFile(optionSet, false, REGISTER_FILE_ARG);
            System.exit(0);
        }
        if (optionSet.has(RESAVE_WORKSPACEFILE_ARG)) {
            failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + RESAVE_WORKSPACEFILE_ARG + " flag");
            WorkspaceFileRestDAOImpl workspaceFileDAO = new WorkspaceFileRestDAOImpl();
            User u = getUserFromOptionSet(optionSet);
            workspaceFileDAO.setUser(u);
            workspaceFileDAO.setRestURL((String) optionSet.valueOf(URL_ARG));
            Long workspaceId = (Long) optionSet.valueOf(RESAVE_WORKSPACEFILE_ARG);
            if (workspaceId == -1) {
                System.out.println("Resaving all workspace files");
                List<WorkspaceFile> wsfList = workspaceFileDAO.getWorkspaceFiles(null, null, null, null, null);
                if (wsfList != null) {
                    System.out.println("Found " + wsfList.size() + " workspace files to resave");
                    for (WorkspaceFile wsf : wsfList) {
                        System.out.println("WorkspaceFile Id: " + wsf.getId());
                        workspaceFileDAO.resave(wsf.getId());
                    }
                }
            } else {
                workspaceFileDAO.resave(workspaceId);
            }
            System.exit(0);
        }
        if (optionSet.has(RESAVE_JOB_ARG)) {
            failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + RESAVE_JOB_ARG + " flag");
            JobRestDAOImpl jobDAO = new JobRestDAOImpl();
            User u = getUserFromOptionSet(optionSet);
            jobDAO.setUser(u);
            jobDAO.setRestURL((String) optionSet.valueOf(URL_ARG));
            Long jobId = (Long) optionSet.valueOf(RESAVE_JOB_ARG);
            if (jobId == -1) {
                System.out.println("Resaving all jobs");
                List<Job> jobList = jobDAO.getJobs(null, null, null, true, true, Boolean.TRUE);
                if (jobList != null) {
                    System.out.println("Found " + jobList.size() + " jobs to resave");
                    for (Job j : jobList) {
                        System.out.println("job id: " + j.getId());
                        jobDAO.resave(j.getId());
                    }
                }
            } else {
                jobDAO.resave(jobId);
            }
            System.exit(0);
        }
        if (optionSet.has(RESAVE_WORKFLOW_ARG)) {
            failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + RESAVE_WORKFLOW_ARG + " flag");
            WorkflowRestDAOImpl workflowDAO = new WorkflowRestDAOImpl();
            User u = getUserFromOptionSet(optionSet);
            workflowDAO.setUser(u);
            workflowDAO.setRestURL((String) optionSet.valueOf(URL_ARG));
            Long workflowId = (Long) optionSet.valueOf(RESAVE_WORKFLOW_ARG);
            if (workflowId == -1) {
                System.out.println("Resaving all workflows");
                List<Workflow> workflowList = workflowDAO.getAllWorkflows(true, Boolean.TRUE);
                if (workflowList != null) {
                    System.out.println("Found " + workflowList.size() + " workflow(s) to resave");
                    for (Workflow w : workflowList) {
                        System.out.println("workflow id: " + w.getId());
                        workflowDAO.resave(w.getId());
                    }
                }
            } else {
                workflowDAO.resave(workflowId);
            }
            System.exit(0);
        }
        if (optionSet.has(UPDATE_PATH_ARG)) {
            failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + UPDATE_PATH_ARG + " flag");
            User u = getUserFromOptionSet(optionSet);
            String workspaceId = (String) optionSet.valueOf(UPDATE_PATH_ARG);
            String path = null;
            if (optionSet.has(PATH_ARG)) {
                path = (String) optionSet.valueOf(PATH_ARG);
            }
            String size = null;
            if (optionSet.has(SIZE_ARG)) {
                size = ((Long) optionSet.valueOf(SIZE_ARG)).toString();
            }
            if (optionSet.has(MD5_ARG)) {
                //wsp.setMd5((String)optionSet.valueOf(MD5_ARG));
            }
            Boolean isFailed = null;
            if (optionSet.has(WORKSPACE_FILE_FAILED_ARG)) {
                isFailed = (Boolean) optionSet.valueOf(WORKSPACE_FILE_FAILED_ARG);
            }
            WorkspaceFileRestDAOImpl workspaceFileDAO = new WorkspaceFileRestDAOImpl();
            workspaceFileDAO.setUser(u);
            workspaceFileDAO.setRestURL((String) optionSet.valueOf(URL_ARG));
            workspaceFileDAO.updatePathSizeAndFailStatus(Long.parseLong(workspaceId), path, size, isFailed);
            System.exit(0);
        }
        if (optionSet.has(SYNC_WITH_CLUSTER_ARG)) {
            // @TODO NEED TO MAKE JOPT DO THIS REQUIRED FLAG CHECKING STUFF
            if (!optionSet.has(WF_EXEC_DIR_ARG)) {
                System.err.println(
                        "-" + WF_EXEC_DIR_ARG + " is required with -" + SYNC_WITH_CLUSTER_ARG + " flag");
                System.exit(2);
            }
            if (!optionSet.has(WF_DIR_ARG)) {
                System.err.println("-" + WF_DIR_ARG + " is required with -" + SYNC_WITH_CLUSTER_ARG + " flag");
                System.exit(3);
            }
            if (!optionSet.has(KEPLER_SCRIPT_ARG)) {
                System.err.println(
                        "-" + KEPLER_SCRIPT_ARG + " is required with -" + SYNC_WITH_CLUSTER_ARG + " flag");
                System.exit(4);
            }
            if (!optionSet.has(CAST_ARG)) {
                System.err.println("-" + CAST_ARG + " is required with -" + SYNC_WITH_CLUSTER_ARG + " flag");
                System.exit(5);
            }
            if (!optionSet.has(STAT_ARG)) {
                System.err.println("-" + STAT_ARG + " is required with -" + SYNC_WITH_CLUSTER_ARG + " flag");
                System.exit(6);
            }
            if (!optionSet.has(QUEUE_ARG)) {
                System.err.println("-" + QUEUE_ARG + " is required with -" + SYNC_WITH_CLUSTER_ARG + " flag");
                System.exit(7);
            }
            if (!optionSet.has(REGISTER_JAR_ARG)) {
                System.err.println(
                        "-" + REGISTER_JAR_ARG + " is required with -" + SYNC_WITH_CLUSTER_ARG + " flag");
                System.exit(8);
            }
            failIfOptionSetMissingLoginOrToken(optionSet, "--" + SYNC_WITH_CLUSTER_ARG + " flag");
            File castFile = (File) optionSet.valueOf(CAST_ARG);
            String castPath = castFile.getAbsolutePath();
            File statFile = (File) optionSet.valueOf(STAT_ARG);
            String statPath = statFile.getAbsolutePath();
            String queue = (String) optionSet.valueOf(QUEUE_ARG);
            File wfExecDir = (File) optionSet.valueOf(WF_EXEC_DIR_ARG);
            File wfDir = (File) optionSet.valueOf(WF_DIR_ARG);
            File keplerScript = (File) optionSet.valueOf(KEPLER_SCRIPT_ARG);
            String registerJar = null;
            if (optionSet.has(REGISTER_JAR_ARG)) {
                File registerJarFile = (File) optionSet.valueOf(REGISTER_JAR_ARG);
                registerJar = registerJarFile.getAbsolutePath();
            }
            JobEmailNotificationData emailNotifyData = getJobEmailNotificationData(optionSet);
            User u = getUserFromOptionSet(optionSet);
            ObjectifyService.ofy();
            String url = (String) optionSet.valueOf(SYNC_WITH_CLUSTER_ARG);
            JobRestDAOImpl jobDAO = new JobRestDAOImpl();
            jobDAO.setRestURL(url);
            jobDAO.setUser(u);
            System.out.println("Running sync with cluster");
            WorkspaceFileRestDAOImpl workspaceFileDAO = new WorkspaceFileRestDAOImpl();
            workspaceFileDAO.setRestURL(url);
            workspaceFileDAO.setUser(u);
            JobPath jobPath = new JobPathImpl(wfExecDir.getAbsolutePath());
            WorkspaceFilePathSetterImpl pathSetter = new WorkspaceFilePathSetterImpl(workspaceFileDAO);
            // Submit jobs to scheduler
            JobSubmissionManager submitter = new JobSubmissionManager(jobDAO, workspaceFileDAO, pathSetter,
                    jobPath, wfDir.getAbsolutePath(), keplerScript.getAbsolutePath(), castPath, queue, u, url,
                    registerJar, emailNotifyData);
            submitter.submitJobs();
            // Update job status for all jobs in system
            MapOfJobStatusFactoryImpl jobStatusFactory = new MapOfJobStatusFactoryImpl(statPath);
            WorkflowFailedParser workflowFailedParser = new WorkflowFailedParserImpl();
            JobStatusUpdater updater = new JobStatusUpdater(jobDAO, jobStatusFactory, workflowFailedParser,
                    jobPath);
            updater.updateJobs();
            System.exit(0);
        }
        if (optionSet.has(App.GET_WORKSPACE_FILE_INFO_ARG)) {
            failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + GET_WORKSPACE_FILE_INFO_ARG + " flag");
            WorkspaceFileRestDAOImpl workspaceFileDAO = new WorkspaceFileRestDAOImpl();
            workspaceFileDAO.setRestURL((String) optionSet.valueOf(URL_ARG));
            List<WorkspaceFile> wsFiles = workspaceFileDAO
                    .getWorkspaceFilesById((String) optionSet.valueOf(GET_WORKSPACE_FILE_INFO_ARG), null);
            if (wsFiles != null) {
                ObjectMapper om = new ObjectMapper();
                ObjectWriter ow = om.writerWithDefaultPrettyPrinter();
                System.out.print("[");
                boolean first = true;
                for (WorkspaceFile wsf : wsFiles) {
                    if (first == false) {
                        System.out.println(",");
                    } else {
                        first = false;
                    }
                    System.out.print(ow.writeValueAsString(wsf));
                }
                System.out.println("]");
            } else {
                System.err.println("[]");
            }
            System.exit(0);
        }
        if (optionSet.has(UPLOAD_FILE_ARG)) {
            addNewWorkspaceFile(optionSet, true, UPLOAD_FILE_ARG);
            System.exit(0);
        }
        if (optionSet.has(UPLOAD_WF_ARG)) {
            Long parentWfId = null;
            String postURL = null;
            if (optionSet.has(URL_ARG)) {
                postURL = (String) optionSet.valueOf(URL_ARG);
                failIfOptionSetMissingLoginOrToken(optionSet,
                        "--" + UPLOAD_WF_ARG + " and --" + URL_ARG + " flag");
            }
            File workflowFile = (File) optionSet.valueOf(UPLOAD_WF_ARG);
            Workflow w = getWorkflowFromFile(workflowFile);
            if (w != null) {
                if (optionSet.has(OWNER_ARG)) {
                    w.setOwner((String) optionSet.valueOf(OWNER_ARG));
                }
                ObjectMapper om = new ObjectMapper();
                if (parentWfId != null) {
                    w.setId(parentWfId);
                }
                if (postURL == null) {
                    System.out.println("\n--- JSON Representation of Workflow ---");
                    ObjectWriter ow = om.writerWithDefaultPrettyPrinter();
                    System.out.println(ow.writeValueAsString(w));
                    System.out.flush();
                    System.out.println("---------------------------------------");
                } else {
                    postURL = new StringBuilder().append(postURL).append(Constants.SLASH)
                            .append(Constants.REST_PATH).append(Constants.SLASH)
                            .append(Constants.WORKFLOWS_PATH).toString();
                    ClientConfig cc = new DefaultClientConfig();
                    cc.getClasses().add(StringProvider.class);
                    cc.getClasses().add(MultiPartWriter.class);
                    Client client = Client.create(cc);
                    client.setFollowRedirects(true);
                    WebResource resource = client.resource(postURL);
                    String workflowAsJson = om.writeValueAsString(w);
                    User u = getUserFromOptionSet(optionSet);
                    client.addFilter(new HTTPBasicAuthFilter(u.getLogin(), u.getToken()));
                    MultivaluedMap queryParams = multivaluedMapFactory.getMultivaluedMap(u);
                    String response = resource.queryParams(queryParams).type(MediaType.APPLICATION_JSON_TYPE)
                            .entity(workflowAsJson).post(String.class);
                    Workflow workflowRes = om.readValue(response, Workflow.class);
                    ObjectWriter ow = om.writerWithDefaultPrettyPrinter();
                    if (workflowRes.getWorkflowFileUploadURL() == null) {
                        throw new Exception(
                                "No upload url found for workflow!!!" + ow.writeValueAsString(workflowRes));
                    }
                    uploadWorkflowFile(workflowRes, workflowFile);
                }
            }
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        System.err.println("Caught Exception: " + ex.getMessage());
        System.exit(2);
    }
    System.exit(0);
}
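This example uses writeValueAsString in two ways: om.writeValueAsString(w) produces the compact JSON posted to the REST service, while the ObjectWriter obtained from om.writerWithDefaultPrettyPrinter() produces indented JSON for console output. A minimal sketch of that difference, assuming a workflow bean instance:

ObjectMapper om = new ObjectMapper();
// Compact, single-line JSON suitable for a request body.
String wire = om.writeValueAsString(workflow);
// Indented JSON, easier to read in logs or on the console.
String pretty = om.writerWithDefaultPrettyPrinter().writeValueAsString(workflow);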
From source file:com.twentyn.patentSearch.DocumentSearch.java
public static void main(String[] args) throws Exception {
    System.out.println("Starting up...");
    System.out.flush();
    Options opts = new Options();
    opts.addOption(Option.builder("x").longOpt("index").hasArg().required()
            .desc("Path to index file to read").build());
    opts.addOption(Option.builder("h").longOpt("help").desc("Print this help message and exit").build());
    opts.addOption(Option.builder("v").longOpt("verbose").desc("Print verbose log output").build());
    opts.addOption(Option.builder("f").longOpt("field").hasArg().desc("The indexed field to search").build());
    opts.addOption(Option.builder("q").longOpt("query").hasArg()
            .desc("The query to use when searching").build());
    opts.addOption(Option.builder("l").longOpt("list-file").hasArg()
            .desc("A file containing a list of queries to run in sequence").build());
    opts.addOption(Option.builder("e").longOpt("enumerate")
            .desc("Enumerate the documents in the index").build());
    opts.addOption(Option.builder("d").longOpt("dump").hasArg()
            .desc("Dump terms in the document index for a specified field").build());
    opts.addOption(Option.builder("o").longOpt("output").hasArg()
            .desc("Write results JSON to this file.").build());
    opts.addOption(Option.builder("n").longOpt("inchi-field").hasArg()
            .desc("The index of the InChI field if an input TSV is specified.").build());
    opts.addOption(Option.builder("s").longOpt("synonym-field").hasArg()
            .desc("The index of the chemical synonym field if an input TSV is specified.").build());
    HelpFormatter helpFormatter = new HelpFormatter();
    CommandLineParser cmdLineParser = new DefaultParser();
    CommandLine cmdLine = null;
    try {
        cmdLine = cmdLineParser.parse(opts, args);
    } catch (ParseException e) {
        System.out.println("Caught exception when parsing command line: " + e.getMessage());
        helpFormatter.printHelp("DocumentIndexer", opts);
        System.exit(1);
    }
    if (cmdLine.hasOption("help")) {
        helpFormatter.printHelp("DocumentIndexer", opts);
        System.exit(0);
    }
    if (!(cmdLine.hasOption("enumerate") || cmdLine.hasOption("dump") || (cmdLine.hasOption("field")
            && (cmdLine.hasOption("query") || cmdLine.hasOption("list-file"))))) {
        System.out.println("Must specify one of 'enumerate', 'dump', or 'field' + {'query', 'list-file'}");
        helpFormatter.printHelp("DocumentIndexer", opts);
        System.exit(1);
    }
    if (cmdLine.hasOption("verbose")) {
        // With help from http://stackoverflow.com/questions/23434252/programmatically-change-log-level-in-log4j2
        LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
        Configuration ctxConfig = ctx.getConfiguration();
        LoggerConfig logConfig = ctxConfig.getLoggerConfig(LogManager.ROOT_LOGGER_NAME);
        logConfig.setLevel(Level.DEBUG);
        ctx.updateLoggers();
        LOGGER.debug("Verbose logging enabled");
    }
    ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.enable(SerializationFeature.INDENT_OUTPUT);
    objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY);
    LOGGER.info("Opening index at " + cmdLine.getOptionValue("index"));
    try (Directory indexDir = FSDirectory.open(new File(cmdLine.getOptionValue("index")).toPath());
            IndexReader indexReader = DirectoryReader.open(indexDir);) {
        if (cmdLine.hasOption("enumerate")) {
            /* Enumerate all documents in the index.
             * With help from
             * http://stackoverflow.com/questions/2311845/is-it-possible-to-iterate-through-documents-stored-in-lucene-index */
            for (int i = 0; i < indexReader.maxDoc(); i++) {
                Document doc = indexReader.document(i);
                LOGGER.info("Doc " + i + ":");
                LOGGER.info(doc);
            }
        } else if (cmdLine.hasOption("dump")) {
            /* Dump indexed terms for a specific field.
             * With help from http://stackoverflow.com/questions/11148036/find-list-of-terms-indexed-by-lucene */
            Terms terms = SlowCompositeReaderWrapper.wrap(indexReader).terms(cmdLine.getOptionValue("dump"));
            LOGGER.info("Has positions: " + terms.hasPositions());
            LOGGER.info("Has offsets: " + terms.hasOffsets());
            LOGGER.info("Has freqs: " + terms.hasFreqs());
            LOGGER.info("Stats: " + terms.getStats());
            LOGGER.info(terms);
            TermsEnum termsEnum = terms.iterator();
            BytesRef br = null;
            while ((br = termsEnum.next()) != null) {
                LOGGER.info(" " + br.utf8ToString());
            }
        } else {
            IndexSearcher searcher = new IndexSearcher(indexReader);
            String field = cmdLine.getOptionValue("field");
            List<Pair<String, String>> queries = null;
            if (cmdLine.hasOption("query")) {
                queries = Collections.singletonList(Pair.of("", cmdLine.getOptionValue("query")));
            } else if (cmdLine.hasOption("list-file")) {
                if (!(cmdLine.hasOption("inchi-field") && cmdLine.hasOption("synonym-field"))) {
                    LOGGER.error("Must specify both inchi-field and synonym-field when using list-file.");
                    System.exit(1);
                }
                Integer inchiField = Integer.parseInt(cmdLine.getOptionValue("inchi-field"));
                Integer synonymField = Integer.parseInt(cmdLine.getOptionValue("synonym-field"));
                queries = new LinkedList<>();
                BufferedReader r = new BufferedReader(new FileReader(cmdLine.getOptionValue("list-file")));
                String line;
                while ((line = r.readLine()) != null) {
                    line = line.trim();
                    if (!line.isEmpty()) {
                        // TODO: use a proper TSV reader; this is intentionally terrible as is.
                        String[] fields = line.split("\t");
                        queries.add(Pair.of(fields[inchiField].replace("\"", ""), fields[synonymField]));
                    }
                }
                r.close();
            }
            if (queries == null || queries.size() == 0) {
                LOGGER.error("Found no queries to run.");
                return;
            }
            List<SearchResult> searchResults = new ArrayList<>(queries.size());
            for (Pair<String, String> queryPair : queries) {
                String inchi = queryPair.getLeft();
                String rawQueryString = queryPair.getRight();
                /* The Lucene query parser interprets the kind of structural annotations we see in chemical entities
                 * as query directives, which is not what we want at all. Phrase queries seem to work adequately
                 * with the analyzer we're currently using. */
                String queryString = rawQueryString.trim().toLowerCase();
                String[] parts = queryString.split("\\s+");
                PhraseQuery query = new PhraseQuery();
                for (String p : parts) {
                    query.add(new Term(field, p));
                }
                LOGGER.info("Running query: " + query.toString());
                BooleanQuery bq = new BooleanQuery();
                bq.add(query, BooleanClause.Occur.MUST);
                bq.add(new TermQuery(new Term(field, "yeast")), BooleanClause.Occur.SHOULD);
                bq.add(new TermQuery(new Term(field, "ferment")), BooleanClause.Occur.SHOULD);
                bq.add(new TermQuery(new Term(field, "fermentation")), BooleanClause.Occur.SHOULD);
                bq.add(new TermQuery(new Term(field, "fermentive")), BooleanClause.Occur.SHOULD);
                bq.add(new TermQuery(new Term(field, "saccharomyces")), BooleanClause.Occur.SHOULD);
                LOGGER.info(" Full query: " + bq.toString());
                TopDocs topDocs = searcher.search(bq, 100);
                ScoreDoc[] scoreDocs = topDocs.scoreDocs;
                if (scoreDocs.length == 0) {
                    LOGGER.info("Search returned no results.");
                }
                List<ResultDocument> results = new ArrayList<>(scoreDocs.length);
                for (int i = 0; i < scoreDocs.length; i++) {
                    ScoreDoc scoreDoc = scoreDocs[i];
                    Document doc = indexReader.document(scoreDoc.doc);
                    LOGGER.info("Doc " + i + ": " + scoreDoc.doc + ", score " + scoreDoc.score + ": "
                            + doc.get("id") + ", " + doc.get("title"));
                    results.add(new ResultDocument(scoreDoc.doc, scoreDoc.score, doc.get("title"),
                            doc.get("id"), null));
                }
                LOGGER.info("----- Done with query " + query.toString());
                // TODO: reduce memory usage when not writing results to an output file.
                searchResults.add(new SearchResult(inchi, rawQueryString, bq, results));
            }
            if (cmdLine.hasOption("output")) {
                try (FileWriter writer = new FileWriter(cmdLine.getOptionValue("output"));) {
                    writer.write(objectMapper.writeValueAsString(searchResults));
                }
            }
        }
    }
}
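Here the mapper is configured before writeValueAsString is ever called: SerializationFeature.INDENT_OUTPUT pretty-prints the results file, and setVisibility(PropertyAccessor.ALL, Visibility.ANY) lets Jackson serialize fields even when there are no public getters. A small self-contained sketch of the same configuration against a hypothetical result type:

import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;

public class VisibilityExample {
    // Hypothetical result type with private fields and no getters.
    static class HitCount {
        private final String query;
        private final int hits;
        HitCount(String query, int hits) { this.query = query; this.hits = hits; }
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper om = new ObjectMapper();
        om.enable(SerializationFeature.INDENT_OUTPUT);                         // pretty-print the output
        om.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY); // pick up private fields
        System.out.println(om.writeValueAsString(new HitCount("ferment", 12)));
    }
}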
From source file:io.fabric8.devops.projects.finder.gogs.JsonHelper.java
public static String toJson(Object dto) throws JsonProcessingException {
    ObjectMapper mapper = createObjectMapper();
    return mapper.writeValueAsString(dto);
}
From source file:com.proofpoint.event.client.TestingUtils.java
public static String getNormalizedJson(String resource) throws IOException {
    String json = Resources.toString(Resources.getResource(resource), Charsets.UTF_8);
    ObjectMapper mapper = new ObjectMapper();
    return mapper.writeValueAsString(mapper.readValue(json, Object.class));
}
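Round-tripping a JSON string through readValue and then writeValueAsString strips insignificant whitespace and applies Jackson's default formatting, which makes two equivalent documents comparable with String equality in tests. A short sketch of why that matters:

// Sketch: normalizing two differently formatted but equivalent JSON strings.
ObjectMapper mapper = new ObjectMapper();
String a = mapper.writeValueAsString(mapper.readValue("{ \"x\" : 1 }", Object.class));
String b = mapper.writeValueAsString(mapper.readValue("{\"x\":1}", Object.class));
// a.equals(b) is true: both normalize to {"x":1}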
From source file:net.poemerchant.util.JsonUtils.java
public static String asString(Map<?, ?> map) throws JsonProcessingException {
    ObjectMapper objectMapper = new ObjectMapper();
    String json = objectMapper.writeValueAsString(map);
    return json;
}
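A quick usage sketch of a helper like this, from a hypothetical call site inside a method that declares (or handles) JsonProcessingException; the map keys become the JSON object's field names:

Map<String, Object> payload = new LinkedHashMap<>();
payload.put("sku", "A-100");
payload.put("qty", 3);
String json = JsonUtils.asString(payload); // {"sku":"A-100","qty":3}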
From source file:com.mafia.server.util.JacksonUtils.java
public static String objectToString(Object t) {
    try {
        ObjectMapper objectMapper = new ObjectMapper();
        return objectMapper.writeValueAsString(t);
    } catch (JsonProcessingException ex) {
        Logger.getLogger(JacksonUtils.class.getName()).log(Level.SEVERE, null, ex);
    }
    return null;
}
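This wrapper logs the JsonProcessingException and returns null, which pushes a null check onto every caller. An alternative design, shown as a hedged sketch only (the method name is made up), is to fail fast by rethrowing as an unchecked exception:

// Alternative sketch: fail fast instead of returning null.
public static String objectToStringOrThrow(Object t) {
    try {
        return new ObjectMapper().writeValueAsString(t);
    } catch (JsonProcessingException ex) {
        throw new IllegalStateException("Unable to serialize value to JSON", ex);
    }
}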
From source file:com.googlecode.batchfb.util.JSONUtils.java
/**
 * Converts the object to a JSON string using the mapper
 */
public static String toJSON(Object value, ObjectMapper mapper) {
    try {
        return mapper.writeValueAsString(value);
    } catch (IOException ex) {
        throw new FacebookException(ex);
    }
}
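Catching IOException works here because JsonProcessingException, the checked exception declared by writeValueAsString, is a subclass of IOException; the helper then rethrows it as the library's own FacebookException. Unlike the previous example, the caller supplies the ObjectMapper, so one configured (and thread-safe) mapper can be reused across calls. A usage sketch:

// Usage sketch: the caller supplies, and typically reuses, a configured mapper.
ObjectMapper mapper = new ObjectMapper();
String json = JSONUtils.toJSON(Collections.singletonMap("ok", true), mapper); // {"ok":true}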
From source file:persistence.ContactPersistence.java
public static void createContact(Contact contact) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    String data = mapper.writeValueAsString(contact);
    JestConfig.indexData(typeName, data);
}
From source file:persistence.ContactPersistence.java
public static void updateContact(String id, Contact contact) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    String data = mapper.writeValueAsString(contact);
    JestConfig.updateData(typeName, id, data);
}
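Both persistence helpers follow the same pattern: serialize the bean with writeValueAsString and hand the resulting JSON document to the Elasticsearch client. A hedged usage sketch; the Contact setters shown here are assumptions made only for illustration:

// Usage sketch; the Contact bean and its setters are assumed for illustration.
Contact contact = new Contact();
contact.setName("Jane Doe");
contact.setEmail("jane@example.com");
ContactPersistence.createContact(contact);       // index a new contact document
ContactPersistence.updateContact("42", contact); // overwrite the document with id "42"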