Java tutorial
// Copyright 2007 Hitachi Data Systems
// All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.

package com.archivas.clienttools.arcmover.cli;

import com.archivas.clienttools.arcutils.api.*;
import com.archivas.clienttools.arcutils.api.jobs.DeleteJob;
import com.archivas.clienttools.arcutils.api.jobs.ManagedJob;
import com.archivas.clienttools.arcutils.config.HCPMoverProperties;
import com.archivas.clienttools.arcutils.profile.AbstractProfileBase;
import com.archivas.clienttools.arcutils.profile.Hcp3AuthNamespaceProfile;
import com.archivas.clienttools.arcutils.profile.ProfileManager;
import com.archivas.clienttools.arcutils.model.LoadSchedule;
import com.archivas.clienttools.arcutils.utils.FileListParser;
import com.archivas.clienttools.arcutils.utils.FileListParserException;
import com.archivas.clienttools.arcutils.utils.database.DatabaseException;
import org.apache.commons.cli.*;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;

public class ArcDelete extends ManagedCLIJob {

    public static final String PACKAGE_NAME = ArcCopy.class.getPackage().getName();
    public static final String CLASS_FULL_NAME = ArcCopy.class.getName();
    public static final String CLASS_NAME = CLASS_FULL_NAME.substring(PACKAGE_NAME.length() + 1);
    public static Logger LOG = Logger.getLogger(CLASS_FULL_NAME);

    private static String PROFILE_OPTION = "profile";
    private static String PATH_OPTION = "path";
    private static String OPERATION_OPTION = "operation";
    private static String REASON_OPTION = "reason";

    ArcMoverEngine arcMover = ArcMoverFactory.getInstance();

    /**
     * Command Line Options
     */
    private static Options cliOptions;
    private static int cliOptionsCount = 0;

    static {
        addNextCLIOption(HELP_OPTION);
        addNextCLIOption(PROFILE_OPTION);
        addNextCLIOption(PATH_OPTION);
        addNextCLIOption(OPERATION_OPTION);
        addNextCLIOption(REASON_OPTION);
        addNextCLIOption(JOB_NAME);
        addNextCLIOption(MAX_CONNECTIONS);
        addNextCLIOption(MAX_NODE_CONNECTIONS);
        addNextCLIOption(REDUCED_MAX_CONNECTIONS);
        addNextCLIOption(REDUCED_MAX_NODE_CONNECTIONS);
        addNextCLIOption(REDUCED_START);
        addNextCLIOption(REDUCED_END);
        addNextCLIOption(EXPORT_RESULTS_TYPE);
        addNextCLIOption(EXPORT_RESULTS_PATH);
        addNextCLIOption(EXPORT_RESULTS_PREFIX);
        addNextCLIOption(RESUME);
        addNextCLIOption(RERUN);
        addNextCLIOption(INSECURE_OPTION);
    }

    private static void addNextCLIOption(String option) {
        cliOrder.put(option, cliOptionsCount++);
    }

    public ArcDelete(String args[], int numCmdLineArgs) {
        super(args, numCmdLineArgs);
        HELP_USAGE_LINE = commandName + " delete --profile <profile_name> [options] [list_file]";
        HELP_HEADER = "Deletes items from the specified location. "
                + "Items to delete are listed in the list_file.\n";
    }

    @SuppressWarnings({ "static-access", "AccessStaticViaInstance" })
    public Options getOptions() {
        if (cliOptions == null) {
            Options options = new Options();

            // *** Adding a new option also requires adding it to the cliOrder list.
            // Note: this library cannot combine required options with help, so all options
            // are added as non-required and the requirements are enforced during parsing.
            options.addOption(OptionBuilder.withDescription("Displays this help text (the default behavior).")
                    .withLongOpt(HELP_OPTION).create("h"));

            // Required
            options.addOption(OptionBuilder.withArgName("profile_name").hasArg().withDescription(
                    "Target location for the delete operation: either a namespace profile name or LFS for the local file system.")
                    .withLongOpt(PROFILE_OPTION).create("p"));
            options.addOption(OptionBuilder.withArgName("path").hasArg()
                    .withDescription("Directory in which to perform the delete operation.").withLongOpt(PATH_OPTION)
                    .create());
            options.addOption(OptionBuilder.withArgName("operation_type").hasArg().withDescription(
                    "Type of operation to perform: delete, purge, privileged-delete, or privileged-purge. If omitted, defaults to delete. Only supported for HCP namespaces.")
                    .withLongOpt(OPERATION_OPTION).create());
            options.addOption(OptionBuilder.withArgName("string").hasArg().withDescription(
                    "Specifies the reason for a privileged operation. The string must be from one through 1024 characters long. Required and only supported for a privileged operation.")
                    .withLongOpt(REASON_OPTION).create());

            // Rerun/resume
            options.addOption(OptionBuilder.withArgName("job_name").hasOptionalArg().withDescription(
                    "Reruns the delete job with the given job name if provided; if no name is provided, reruns the last delete job run. When rerunning you can change the load and export settings. Any changes to the profile, path, or operation-type will not change what is set in the job.")
                    .withLongOpt(RERUN).create());
            options.addOption(OptionBuilder.withArgName("job_name").hasOptionalArg().withDescription(
                    "Resumes the delete job from where it left off; if no name is provided, resumes the last delete job run. When resuming you can change the load and export settings. Any changes to the profile, path, or operation-type will not change what is set in the job.")
                    .withLongOpt(RESUME).create());

            // Optional
            options.addOption(getInsecureSSLOption());

            // results_types is not shared only because CONFLICT is not a valid option for delete jobs
            options.addOption(OptionBuilder.withArgName("results_types").hasArg().withDescription(
                    "Types of results lists to export: either ALL or a comma-separated list that includes one or more of SUCCESS, FAILURE, and JOBLIST. If omitted, no results lists are exported.")
                    .withLongOpt(EXPORT_RESULTS_TYPE).create());

            getSharedOptions(options);
            cliOptions = options;
        }
        return cliOptions;
    }

    protected void parseArgs() throws ParseException {
        // Create the command line parser
        CommandLineParser parser = new PosixParser();
        CommandLine cmdLine;

        // Parse the command line arguments
        cmdLine = parser.parse(getOptions(), getArgs());

        // Help
        printHelp = cmdLine.hasOption("h");
        if (printHelp) {
            return;
        }

        initializeProfiles(cmdLine.hasOption("insecure"));

        @SuppressWarnings({ "unchecked" })
        List<String> argList = cmdLine.getArgList();

        // Handle the load schedule and export lists
        LoadSchedule schedule = LoadSchedule.getDefaultLoadSchedule();
        getLoadSchedule(cmdLine, schedule);
        setUpExportListThread(cmdLine);

        // Check for debug setting where we can rerun a job -- this is for testing purposes only
        // Validate the input file if one was provided
        // See if we are rerunning or resuming; set up the job if we are
        boolean rerunning = handleRerunAndResume(cmdLine, schedule);
        if (rerunning) {
            List<String> extraOptions = new ArrayList<String>();
            if (cmdLine.hasOption(PROFILE_OPTION)) {
                extraOptions.add(PROFILE_OPTION);
            }
            if (cmdLine.hasOption(PATH_OPTION)) {
                extraOptions.add(PATH_OPTION);
            }
            if (cmdLine.hasOption(OPERATION_OPTION)) {
                extraOptions.add(OPERATION_OPTION);
            }
            if (cmdLine.hasOption(REASON_OPTION)) {
                extraOptions.add(REASON_OPTION);
            }
            if (cmdLine.hasOption(JOB_NAME)) {
                extraOptions.add(JOB_NAME);
            }
            if (!extraOptions.isEmpty()) {
                throw new ParseException("The following supplied options are not allowed with --" + RESUME
                        + " or --" + RERUN + ": " + extraOptions);
            }

            // The list_file is not allowed for rerun/resume
            if (argList.size() > numCmdLineArgs - 1) {
                throw new ParseException(
                        "The list_file argument is not allowed with --" + RESUME + " or --" + RERUN);
            }
        } else {
            if (argList.size() != numCmdLineArgs) {
                throw new ParseException("Missing argument list_file.");
            }

            // Get the name of the input file
            String listFileName = argList.get(numCmdLineArgs - 1);

            // Required fields
            String srcProfileName = getProfileNameFromCmdLineAndValidateExistance(cmdLine, PROFILE_OPTION);
            AbstractProfileBase srcProfile = ProfileManager.getProfileByName(srcProfileName);

            // Optional fields
            String sourcePath = null;
            if (cmdLine.hasOption(PATH_OPTION)) {
                sourcePath = cmdLine.getOptionValue(PATH_OPTION);
                srcProfile.setDisplayPath(sourcePath);
            }
            String jobName = null;
            if (cmdLine.hasOption(JOB_NAME)) {
                jobName = cmdLine.getOptionValue(JOB_NAME);
            }

            DeleteJob.Operation operation = DeleteJob.Operation.DELETE;
            String reason = null;
            if (srcProfile instanceof Hcp3AuthNamespaceProfile) {
                if (cmdLine.hasOption(OPERATION_OPTION)) {
                    String operationStr = cmdLine.getOptionValue(OPERATION_OPTION);
                    try {
                        operation = DeleteJob.Operation.getFromString(operationStr);
                    } catch (IllegalArgumentException e) {
                        throw new ParseException(e.getMessage());
                    }
                }

                // Make sure that a reason is supplied if the operation is a privileged one
                if (operation.isPrivilegedOperation()) {
                    if (!cmdLine.hasOption(REASON_OPTION)) {
                        throw new ParseException("The " + REASON_OPTION + " option is required with a "
                                + operation.getStringRepresentation() + " operation.");
                    }
                    reason = cmdLine.getOptionValue(REASON_OPTION);
                } else {
                    if (cmdLine.hasOption(REASON_OPTION)) {
                        throw new ParseException("The " + REASON_OPTION + " option is only supported for "
                                + DeleteJob.Operation.PRIVILEGED_DELETE.getStringRepresentation() + " and "
                                + DeleteJob.Operation.PRIVILEGED_PURGE.getStringRepresentation() + " operations.");
                    }
                }
            } else {
                List<String> extraOptions = new ArrayList<String>();
                if (cmdLine.hasOption(OPERATION_OPTION)) {
                    extraOptions.add(OPERATION_OPTION);
                }
                if (cmdLine.hasOption(REASON_OPTION)) {
                    extraOptions.add(REASON_OPTION);
                }
                if (!extraOptions.isEmpty()) {
                    throw new ParseException(
                            "The following supplied options are only supported for HCP namespaces: " + extraOptions);
                }
            }

            // Validate the input file
            try {
                FileListParser.validateFile(new File(listFileName), srcProfile, sourcePath, "");
            } catch (IOException e) {
                throw new ParseException("Error parsing input file. Msg: " + e.getMessage());
            } catch (FileListParserException e) {
                throw new ParseException("Error parsing input file. Msg: " + e.getMessage());
            }

            // Set up the job with the arguments
            try {
                setupDeleteJob(listFileName, srcProfile, sourcePath, jobName, operation, reason, schedule);
                managedJobImpl = arcMover.createManagedJob(managedJob);
            } catch (IllegalArgumentException e) {
                throw new ParseException(
                        "IllegalArgumentException writing to database during file list parsing. Msg: "
                                + e.getMessage());
            } catch (DatabaseException e) {
                throw new ParseException(
                        "DatabaseException writing to database during file list parsing. Msg: " + e.getMessage());
            } catch (JobException e) {
                throw new ParseException(
                        "JobException writing to database during file list parsing. Msg: " + e.getMessage());
            }
        }
    }

    protected String getErrorMessage() {
        return " failed to delete because: ";
    }

    protected ManagedJob.Type getJobType() {
        return ManagedJob.Type.DELETE;
    }

    protected JobId getLastJobID() throws NumberFormatException {
        return new JobId(Long.parseLong(HCPMoverProperties.LAST_DELETE_JOB_RUN.get()));
    }

    @Override
    protected String getFormattedJobStats(ManagedJobStats jobStats) {
        return String.format("%3$,d objects found, %1$,d/%2$,d files deleted",
                jobStats.getCompletedObjectCount(), jobStats.getTotalObjectCount(),
                jobStats.getDiscoveredObjectCount());
    }

    protected void appendAdditionalOutput(ManagedJobStats jobStats, StringBuilder sb, String padding) {
        // no-op
    }

    private void setupDeleteJob(String listFileName, AbstractProfileBase profile, String path, String jobName,
            DeleteJob.Operation operation, String reason, LoadSchedule schedule) throws DatabaseException {
        DeleteJob deleteJob = new DeleteJob(profile, new File(listFileName));
        if (jobName != null) {
            deleteJob.setUserDefinedJobName(jobName);
        }
        deleteJob.setOperation(operation);
        if (reason != null) {
            deleteJob.setReason(reason);
        }
        deleteJob.setSourcePath(profile.encode(path));
        deleteJob.setLoadSchedule(schedule);
        managedJob = deleteJob;
    }

    public boolean supportsConflictReports() {
        return false;
    }
}
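For orientation, here is a minimal, hypothetical sketch of how this job class might be constructed from a driver. Only the public ArcDelete(String[], int) constructor appears in this file; the code that actually parses the arguments and runs the job lives in ManagedCLIJob, which is not shown, so the commented-out call, the numCmdLineArgs value, and the ArcDeleteSketch class name are illustrative assumptions rather than part of the original source. Running it would require the HCP Data Migrator jars on the classpath.

import com.archivas.clienttools.arcmover.cli.ArcDelete;

// Hypothetical driver sketch -- not part of the original source.
public final class ArcDeleteSketch {
    public static void main(String[] args) {
        // Example arguments mirroring HELP_USAGE_LINE:
        //   delete --profile myNamespaceProfile --path /rest/docs delete-list.txt
        // The second constructor argument is the expected number of positional
        // (non-option) arguments; the value used here is an assumption.
        ArcDelete job = new ArcDelete(args, 2);

        // The parse/execute loop is provided by ManagedCLIJob and is not visible
        // in this file, so it is only indicated here:
        // job.parseArgs();   // protected; normally invoked by the superclass driver
    }
}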