List of usage examples for org.apache.maven.plugin.logging.Log#info
void info(Throwable error);
From source file:org.ihtsdo.mojo.maven.GenerateScripts.java
License:Apache License
public void execute() throws MojoExecutionException { Log l = getLog(); try {// w w w .java 2 s .c om if (MojoUtil.alreadyRun(l, execution.getExecutionId(), this.getClass(), targetDirectory)) { return; } } catch (NoSuchAlgorithmException e1) { throw new MojoExecutionException(e1.getMessage(), e1); } catch (IOException e1) { throw new MojoExecutionException(e1.getMessage(), e1); } if (scriptOutputDir == null) { if (libDir == null) { l.info("Skipping generate scripts. scriptOutputDir and libDir are null"); return; } else { l.info("Skipping generate scripts. scriptOutputDir is null"); return; } } if (outputDirectory == null) { l.info("Skipping generate scripts. outputDirectory is null"); return; } l.info("scriptOutputDir: " + scriptOutputDir); l.info("outputDirectory: " + outputDirectory); l.info("libDir: " + libDir); l.info("jarDir: " + libDir); if (scriptNames != null) { l.info("scriptNames: " + Arrays.asList(scriptNames)); } else { l.info("scriptNames: " + scriptNames); } File[] jars; if (jarDir != null) { File jarDirectory = new File(outputDirectory + fileSep + jarDir); l.info("jarDirectory 1: " + jarDirectory); jars = jarDirectory.listFiles(new FileFilter() { public boolean accept(File pathname) { return pathname.getPath().endsWith(".jar"); } }); } else { File jarDirectory = new File(outputDirectory + fileSep + scriptOutputDir + libDir); l.info("jarDirectory 2: " + jarDirectory); jars = jarDirectory.listFiles(new FileFilter() { public boolean accept(File pathname) { return pathname.getPath().endsWith(".jar"); } }); } if (scriptNames == null) { startAllScript(jars, "startCore", "start-core.config", "500m", "500m", "Workflow Bundle", true, false, false, false); startAllScript(jars, "startJehri", "start-jehri.config", "500m", "500m", "Jehri Bundle", true, false, false, false); startAllScript(jars, "startJehriAuthoring", "start-jehri-authoring.config", "500m", "500m", "Jehri Authoring Bundle", true, false, false, false); startAllScript(jars, "startJehriCentral", 
"start-jehri-central.config", "500m", "500m", "Jehri Central Bundle", true, false, false, false); startAllScript(jars, "startJehriSatellite", "start-jehri-satellite.config", "500m", "500m", "Jehri Satellite Bundle", true, false, false, false); startAllScript(jars, "startAce", "start-ace.config", "700m", "1400m", "Ace Bundle", true, false, false, false); startAllScript(jars, "ace", "start-ace-local.config", "700m", "1400m", "Ace Bundle", true, false, false, false); startAllScript(jars, "dAce", "start-ace-local.config", "700m", "1400m", "Ace Bundle", true, false, true, false); startAllScript(jars, "pAce", "start-ace-local.config", "700m", "1400m", "Ace Bundle", true, false, false, true); startAllScript(jars, "dStartJehri", "start-jehri.config", "500m", "500m", "Jehri Bundle", true, false, true, false); startAllScript(jars, "pStartJehri", "start-jehri.config", "500m", "500m", "Jehri Bundle", true, false, false, true); } else { for (String name : scriptNames) { l.info("generating script: " + name); if (name.equalsIgnoreCase("startCore")) { startAllScript(jars, "startCore", "start-core.config", "500m", "500m", "Workflow Bundle", true, false, false, false); } else if (name.equalsIgnoreCase("startJehri")) { startAllScript(jars, "startJehri", "start-jehri.config", "500m", "500m", "Jehri Bundle", true, false, false, false); } else if (name.equalsIgnoreCase("startAce")) { startAllScript(jars, "startAce", "start-ace.config", "700m", "1400m", "Ace Bundle", true, false, false, false); startAllScript(jars, "startAceNoNet", "start-ace-local.config", "700m", "1400m", "Ace Bundle", true, false, false, false); } else if (name.equalsIgnoreCase("amtViewer")) { startAllScript(jars, "amtViewer", "start-ace.config", "700m", "1400m", "AMT Viewer", true, true, false, false); } else if (name.equalsIgnoreCase("dAce")) { startAllScript(jars, "dAce", "start-ace-local.config", "700m", "1400m", "Ace Bundle", true, false, true, false); } else if (name.equalsIgnoreCase("pAce")) { 
startAllScript(jars, "pAce", "start-ace-local.config", "700m", "1400m", "Ace Bundle", true, false, false, true); } else if (name.equalsIgnoreCase("dStartJehri")) { startAllScript(jars, "dStartJehri", "start-jehri.config", "500m", "500m", "Jehri Bundle", true, false, true, false); } else if (name.equalsIgnoreCase("pStartJehri")) { startAllScript(jars, "pStartJehri", "start-jehri.config", "500m", "500m", "Jehri Bundle", true, false, false, true); } else if (name.equalsIgnoreCase("wb")) { startAllScript(jars, "wb", "start-wb-local.config", "1400m", "1400m", "Workbench Bundle", true, false, false, false); } else if (name.equalsIgnoreCase("dWb")) { startAllScript(jars, "dWb", "start-wb-local.config", "1400m", "1400m", "Workbench Bundle", true, false, true, false); } else if (name.equalsIgnoreCase("pWb")) { startAllScript(jars, "pWb", "start-wb-local.config", "1400m", "1400m", "Workbench Bundle", true, false, false, true); } } } }
From source file:org.ihtsdo.mojo.maven.MojoUtil.java
License:Apache License
/**
 * Returns true when at least one of {@code allowedGoals} appears in the
 * session's goal list; otherwise logs why execution is being skipped and
 * returns false.
 *
 * @param log          mojo logger used for the skip message
 * @param sessionGoals goals of the current Maven session
 * @param allowedGoals goals for which execution is permitted
 * @return true if execution is allowed
 */
public static boolean allowedGoal(Log log, List<String> sessionGoals, String[] allowedGoals) {
    for (String candidate : allowedGoals) {
        if (sessionGoals.contains(candidate)) {
            return true;
        }
    }
    log.info("Skipping execution since session goals: " + sessionGoals
            + " do not contain one of the following allowed goals: " + Arrays.asList(allowedGoals));
    return false;
}
From source file:org.ihtsdo.mojo.maven.MojoUtil.java
License:Apache License
public static boolean alreadyRun(Log l, String input, Class<?> targetClass, File targetDir) throws NoSuchAlgorithmException, IOException { Sha1HashCodeGenerator generator = new Sha1HashCodeGenerator(); if (input == null) { input = targetClass.getName();/*ww w .jav a 2 s . c om*/ l.warn("Input is NULL. Using mojo class name instead..."); } generator.add(input); String hashCode = generator.getHashCode(); File goalFileDirectory = new File(targetDir, "completed-mojos"); File goalFile = new File(goalFileDirectory, hashCode); // check to see if this goal has been executed previously if (!goalFile.exists()) { // create a new file to indicate this execution has completed goalFileDirectory.mkdirs(); goalFile.createNewFile(); } else { l.info("Previously executed: " + goalFile.getAbsolutePath() + "\nNow stopping."); StringOutputStream sos = new StringOutputStream(); PrintStream ps = new PrintStream(sos); l.info("Properties: " + sos.toString()); return true; } return false; }
From source file:org.ihtsdo.mojo.maven.Transform.java
License:Apache License
/**
 * Runs the configured column transforms: for each OutputSpec, opens its
 * writers, wires column transformers, tokenizes each input file row by row,
 * feeds every cell through the transformers registered for its column, and
 * finally writes out the accumulated UUID/native-id maps. The whole run is
 * guarded by a "completed-mojos" marker file keyed on a SHA-1 hash of the
 * mojo's configuration, so re-running with identical inputs is a no-op.
 */
public void execute() throws MojoExecutionException, MojoFailureException {
    Log logger = getLog();
    logger.info("starting transform: " + Arrays.asList(outputSpecs));
    // Calculate the SHA-1 hashcode for this mojo based on its inputs; the
    // hash names the marker file that records a completed execution.
    Sha1HashCodeGenerator generator;
    String hashCode = "";
    try {
        generator = new Sha1HashCodeGenerator();
        for (int i = 0; i < outputSpecs.length; i++) {
            generator.add(outputSpecs[i]);
        }
        generator.add(idFileLoc);
        generator.add(appendIdFiles);
        generator.add(idEncoding);
        generator.add(outputColumnDelimiter);
        generator.add(outputCharacterDelimiter);
        Iterator iter = sourceRoots.iterator();
        while (iter.hasNext()) {
            generator.add(iter.next());
        }
        hashCode = generator.getHashCode();
    } catch (NoSuchAlgorithmException e) {
        // NOTE(review): swallowed — execution continues with an empty
        // hashCode, so the marker file would be named "". Consider failing.
        System.out.println(e);
    }
    // NOTE(review): hard-coded "target" here, unlike other mojos in this
    // file which use a targetDirectory parameter.
    File goalFileDirectory = new File("target" + File.separator + "completed-mojos");
    File goalFile = new File(goalFileDirectory, hashCode);
    // Check to see if this goal has been executed previously.
    if (!goalFile.exists()) {
        logger.info("goal has not run before");
        try {
            for (OutputSpec outSpec : outputSpecs) {
                logger.info("processing " + outSpec);
                // Open every writer for this output spec and hand it its
                // buffered character stream.
                I_TransformAndWrite[] writers = outSpec.getWriters();
                for (I_TransformAndWrite tw : writers) {
                    File outputFile = new File(tw.getFileName());
                    outputFile.getParentFile().mkdirs();
                    FileOutputStream fos = new FileOutputStream(outputFile, tw.append());
                    OutputStreamWriter osw = new OutputStreamWriter(fos, tw.getOutputEncoding());
                    BufferedWriter bw = new BufferedWriter(osw);
                    tw.init(bw, this);
                    // NOTE(review): these writers are only closed at the end
                    // of the happy path below — an exception leaks them.
                }
                // Constant transforms produce a fixed value per record; they
                // are primed once here ("test" input) and registered on every
                // writer.
                if (outSpec.getConstantSpecs() != null) {
                    for (I_ReadAndTransform constantTransform : outSpec.getConstantSpecs()) {
                        constantTransform.setup(this);
                        constantTransform.transform("test");
                        for (I_TransformAndWrite tw : writers) {
                            tw.addTransform(constantTransform);
                        }
                    }
                }
                InputFileSpec[] inputSpecs = outSpec.getInputSpecs();
                for (InputFileSpec spec : inputSpecs) {
                    nextColumnId = 0;
                    // column index -> set of transformers registered for it
                    Map<Integer, Set<I_ReadAndTransform>> columnTransformerMap =
                            new HashMap<Integer, Set<I_ReadAndTransform>>();
                    logger.info("Now processing file spec:\n\n" + spec);
                    for (I_ReadAndTransform t : spec.getColumnSpecs()) {
                        t.setup(this);
                        Set<I_ReadAndTransform> transformerSet = (Set<I_ReadAndTransform>) columnTransformerMap
                                .get((Integer) t.getColumnId());
                        if (transformerSet == null) {
                            transformerSet = new HashSet<I_ReadAndTransform>();
                            columnTransformerMap.put((Integer) t.getColumnId(), transformerSet);
                        }
                        transformerSet.add(t);
                        for (I_TransformAndWrite tw : writers) {
                            tw.addTransform(t);
                        }
                    }
                    File inputFile = normalize(spec);
                    if (inputFile != null) {
                        if (inputFile.length() == 0) {
                            logger.warn("skipping 0 length file " + inputFile);
                            continue;
                        }
                    } else {
                        throw new MojoFailureException("Spec cannot be normalized. Does the input file exist?");
                    }
                    // Tokenize the input: every character 0x1F-0xFF is a word
                    // char, the column delimiter is an ordinary char, and line
                    // ends are reported as tokens.
                    FileInputStream fs = new FileInputStream(inputFile);
                    InputStreamReader isr = new InputStreamReader(fs, spec.getInputEncoding());
                    BufferedReader br = new BufferedReader(isr);
                    StreamTokenizer st = new StreamTokenizer(br);
                    st.resetSyntax();
                    st.wordChars('\u001F', '\u00FF');
                    st.ordinaryChar(spec.getInputColumnDelimiter());
                    st.eolIsSignificant(true);
                    if (spec.skipFirstLine()) {
                        skipLine(st);
                    }
                    int tokenType = st.nextToken();
                    int rowCount = 0;
                    // Outer loop: one iteration per row. Inner loop: one
                    // iteration per cell until CR, LF or EOF.
                    while (tokenType != StreamTokenizer.TT_EOF) {
                        int currentColumn = 0;
                        while (tokenType != '\r' && tokenType != '\n' && tokenType != StreamTokenizer.TT_EOF) {
                            if (columnTransformerMap.get((Integer) currentColumn) == null) {
                                // No transformer registered for this column —
                                // the cell is read and discarded.
                            } else {
                                for (Object tObj : (Set) columnTransformerMap.get((Integer) currentColumn)) {
                                    I_ReadAndTransform t = (I_ReadAndTransform) tObj;
                                    // A delimiter token in cell position means
                                    // the cell was empty -> transform(null).
                                    if (tokenType == spec.getInputColumnDelimiter().charValue()) {
                                        t.transform(null);
                                    } else {
                                        t.transform(st.sval);
                                    }
                                }
                            }
                            tokenType = st.nextToken();
                            // Consume the delimiter after a cell; if the next
                            // token is ALSO a delimiter the column was empty,
                            // so push it back to be handled as an empty cell
                            // on the next pass. (Intricate — verify against
                            // files with consecutive delimiters before
                            // changing.)
                            if (spec.getInputColumnDelimiter().charValue() == tokenType) {
                                tokenType = st.nextToken();
                                if (spec.getInputColumnDelimiter().charValue() == tokenType) {
                                    st.pushBack();
                                }
                            }
                            currentColumn++;
                        }
                        // Row complete: flush one record per writer.
                        for (I_TransformAndWrite tw : writers) {
                            tw.processRec();
                        }
                        switch (tokenType) {
                        case '\r':
                            // CR: consume the LF that presumably follows.
                            tokenType = st.nextToken();
                            break;
                        case '\n': // bare LF line ending
                            break;
                        case StreamTokenizer.TT_EOF: // end of file
                            break;
                        default:
                            throw new Exception(
                                    "There are more columns than transformers. Tokentype: " + tokenType);
                        }
                        rowCount++;
                        if (rowCount % PROGRESS_LOGGING_SIZE == 0) {
                            logger.info(
                                    "processed " + rowCount + " rows of file " + inputFile.getAbsolutePath());
                        }
                        // Beginning of next row.
                        tokenType = st.nextToken();
                    }
                    // NOTE(review): only the FileInputStream is closed, and
                    // only on the happy path; the readers wrapping it are
                    // abandoned and nothing is closed on exception.
                    fs.close();
                    logger.info("Processed: " + rowCount + " rows.");
                }
                logger.info("closing writers");
                int count = 0;
                for (I_TransformAndWrite tw : writers) {
                    logger.info("closing " + ++count + " of " + writers.length);
                    tw.close();
                }
                logger.info("cleanup inputs");
                count = 0;
                for (InputFileSpec ifs : inputSpecs) {
                    logger.info("cleaning input spec " + ++count + " of " + inputSpecs.length);
                    int transformCount = 0;
                    I_ReadAndTransform[] columnSpecs = ifs.getColumnSpecs();
                    for (I_ReadAndTransform t : columnSpecs) {
                        logger.info("cleaning column spec " + ++transformCount + " of " + columnSpecs.length);
                        t.cleanup(this);
                    }
                }
                logger.info("cleanup inputs - done");
            }
            // Write out the UUID -> native id map accumulated during the
            // transforms, one line per entry.
            if (uuidToNativeMap != null) {
                logger.info("ID map is not null.");
                File outputFileLoc = new File(idFileLoc);
                outputFileLoc.getParentFile().mkdirs();
                File file = new File(outputFileLoc, "uuidToNative.txt");
                FileOutputStream fos = new FileOutputStream(file, appendIdFiles);
                OutputStreamWriter osw = new OutputStreamWriter(fos, idEncoding);
                BufferedWriter bw = new BufferedWriter(osw);
                if (includeHeader) {
                    bw.append("UUID");
                    bw.append(outputColumnDelimiter);
                    bw.append("NID");
                    bw.append("\n");
                }
                int rowcount = 0;
                for (Iterator i = uuidToNativeMap.entrySet().iterator(); i.hasNext();) {
                    Map.Entry entry = (Entry) i.next();
                    bw.append(entry.getKey().toString());
                    bw.append(outputColumnDelimiter);
                    bw.append(entry.getValue().toString());
                    bw.append("\n");
                    rowcount++;
                    // NOTE(review): BUG — rowcount is incremented twice per
                    // entry (here and in the condition below), so the logged
                    // counts skip values and the progress cadence is halved.
                    if (rowcount++ % PROGRESS_LOGGING_SIZE == 0) {
                        logger.info("processed " + rowcount + " rows of file " + file.getAbsolutePath());
                    }
                }
                bw.close();
            }
            // Write one <source>ToUuid.txt file per source id system.
            logger.info("writing out the source to uuid map");
            for (Iterator keyItr = sourceToUuidMapMap.keySet().iterator(); keyItr.hasNext();) {
                String key = (String) keyItr.next();
                File outputFileLoc = new File(idFileLoc);
                outputFileLoc.getParentFile().mkdirs();
                File file = new File(outputFileLoc, key + "ToUuid.txt");
                FileOutputStream fos = new FileOutputStream(file, appendIdFiles);
                OutputStreamWriter osw = new OutputStreamWriter(fos, idEncoding);
                BufferedWriter bw = new BufferedWriter(osw);
                if (includeHeader) {
                    // NOTE(review): toUpperCase() without a Locale — header
                    // text depends on the default locale.
                    bw.append(key.toUpperCase());
                    bw.append(outputColumnDelimiter);
                    bw.append("UUID");
                    bw.append("\n");
                }
                Map idMap = (Map) sourceToUuidMapMap.get(key);
                int rowcount = 0;
                for (Iterator i = idMap.entrySet().iterator(); i.hasNext();) {
                    Map.Entry entry = (Entry) i.next();
                    bw.append(entry.getKey().toString());
                    bw.append(outputColumnDelimiter);
                    bw.append(entry.getValue().toString());
                    bw.append("\n");
                    rowcount++;
                    // NOTE(review): same double-increment bug as above.
                    if (rowcount++ % PROGRESS_LOGGING_SIZE == 0) {
                        logger.info("processed " + rowcount + " rows of file " + file.getAbsolutePath());
                    }
                }
                bw.close();
            }
            // Create the marker file to indicate this execution has completed.
            try {
                goalFileDirectory.mkdirs();
                goalFile.createNewFile();
            } catch (IOException e) {
                e.printStackTrace();
            }
            logger.info("execution complete");
        } catch (FileNotFoundException e) {
            throw new MojoExecutionException(e.getMessage(), e);
        } catch (UnsupportedEncodingException e) {
            throw new MojoExecutionException(e.getMessage(), e);
        } catch (IOException e) {
            throw new MojoExecutionException(e.getMessage(), e);
        } catch (Exception e) {
            throw new MojoExecutionException(e.getMessage(), e);
        }
    } else {
        logger.info("Skipping goal - executed previously.");
    }
}
From source file:org.ihtsdo.mojo.maven.WriteConfigFile.java
License:Apache License
public void execute() throws MojoExecutionException, MojoFailureException { Log l = getLog(); try {// w ww . j a v a2 s . c o m if (MojoUtil.alreadyRun(l, execution.getExecutionId(), this.getClass(), targetDirectory)) { return; } } catch (NoSuchAlgorithmException e1) { throw new MojoExecutionException(e1.getMessage(), e1); } catch (IOException e1) { throw new MojoExecutionException(e1.getMessage(), e1); } List<Artifact> dependencyWithoutProvided = new ArrayList<Artifact>(); for (Artifact a : artifacts) { if (a.getScope().equals("provided")) { getLog().info("Not adding provided: " + a); } else if (a.getGroupId().endsWith("runtime-directory") || a.getScope().equals("runtime-directory")) { getLog().info("Not adding runtime-directory: " + a); } else { if (a.getScope().equals("system")) { getLog().info("System dependency: " + a); } dependencyWithoutProvided.add(a); } } try { URLClassLoader libLoader = MojoUtil.getProjectClassLoader(dependencyWithoutProvided); this.outputDirectory.mkdirs(); for (int i = 0; i < specs.length; i++) { l.info("writing config for: " + specs[i]); try { Class<?> specClass = libLoader.loadClass(specs[i].getClassName()); Constructor<?> specConstructor = specClass.getConstructor(new Class[] {}); Object obj = specConstructor.newInstance(new Object[] {}); Method m = specClass.getMethod(specs[i].getMethodName(), new Class[] { File.class }); m.invoke(obj, new Object[] { new File(outputDirectory, specs[i].getConfigFileName()) }); } catch (Exception e) { throw new MojoExecutionException("Problem writing config file: " + specs[i].getClassName(), e); } } } catch (Exception e) { throw new MojoExecutionException(e.getLocalizedMessage(), e); } }
From source file:org.ihtsdo.mojo.maven.WriteDirectories.java
License:Apache License
public void execute() throws MojoExecutionException, MojoFailureException { Log l = getLog(); try {// w ww . j ava2 s .co m if (MojoUtil.alreadyRun(getLog(), this.getClass().getCanonicalName() + dependencies + outputDirectory.getCanonicalPath(), this.getClass(), targetDirectory)) { return; } } catch (NoSuchAlgorithmException e) { throw new MojoExecutionException(e.getLocalizedMessage(), e); } catch (IOException e) { throw new MojoExecutionException(e.getLocalizedMessage(), e); } for (Artifact a : artifacts) { if (a.getScope().equals("runtime-directory")) { File rootDir = this.outputDirectory; if (targetSubDir != null) { rootDir = new File(rootDir, targetSubDir); } extractArtifactDependencyToDir(l, rootDir, a); } else if (a.getScope().equals("resource-directory")) { File rootDir = new File(this.sourceDirectory.getParentFile(), "resources"); if (targetSubDir != null) { rootDir = new File(rootDir, targetSubDir); } if (rootDir.exists() == false) { extractArtifactDependencyToDir(l, rootDir, a); } else { l.info("resource directory already exists: " + rootDir.getAbsolutePath()); } } } }
From source file:org.ihtsdo.mojo.maven.WriteDirectories.java
License:Apache License
private void extractArtifactDependencyToDir(Log l, File rootDir, Artifact a) throws MojoExecutionException { l.info("Processing dependency artifact: " + a); l.info(" file: " + a.getFile()); try {/* w ww . ja va 2 s . co m*/ FileInputStream fis = new FileInputStream(a.getFile()); BufferedInputStream bis = new BufferedInputStream(fis); JarInputStream jis = new JarInputStream(bis); JarEntry je = jis.getNextJarEntry(); while (je != null) { // l.info(" entry: " + je.getName()); if (je.getName().contains("META-INF") == false) { // l.info(" entry ok"); File destFile = new File(rootDir, je.getName()); destFile.getParentFile().mkdirs(); if (je.isDirectory()) { destFile.mkdirs(); } else { OutputStream fos = new FileOutputStream(destFile); byte[] buffer = new byte[10240]; long bytesToRead = je.getSize(); while (bytesToRead > 0) { // write contents of // 'is' to // 'fos' int bytesRead = jis.read(buffer); fos.write(buffer, 0, bytesRead); bytesToRead = bytesToRead - bytesRead; } fos.close(); destFile.setLastModified(je.getTime()); } } je = jis.getNextJarEntry(); } jis.close(); } catch (Exception e) { throw new MojoExecutionException(e.getMessage() + " file:" + a.getFile(), e); } }
From source file:org.impalaframework.maven.plugin.CopyModulesMojo.java
License:Apache License
public void execute() throws MojoExecutionException { final Log logger = getLog(); if (isImpalaHost()) { moduleStagingDirectory = MojoUtils.getModuleStagingDirectory(getLog(), project, moduleStagingDirectory); if (logger.isDebugEnabled()) { logger.debug("Maven projects: " + dependencies); logger.debug("Current project: " + project); }// www .j a va 2 s . c om File targetDirectory = getTargetDirectory(); File stagingDirectory = new File(moduleStagingDirectory); try { if (logger.isDebugEnabled()) { logger.debug("Staging directory " + stagingDirectory.getCanonicalPath()); } FileUtils.forceMkdir(targetDirectory); } catch (IOException e) { throw new MojoExecutionException(e.getMessage(), e); } if (logger.isInfoEnabled()) { logger.info("Copying files from staging directory: " + stagingDirectory); } final File[] listFiles = stagingDirectory.listFiles(); if (listFiles != null) { for (File moduleFile : listFiles) { final String targetFileName = moduleFile.getName(); MojoUtils.copyFile(moduleFile, targetDirectory, targetFileName); if (logger.isInfoEnabled()) { logger.info("Copying from from staging directory: " + moduleFile); } } } } }
From source file:org.impalaframework.maven.plugin.MojoUtils.java
License:Apache License
static String getModuleStagingDirectory(Log log, MavenProject project, String moduleStagingDirectory) throws MojoExecutionException { //FIXME test//from w ww . j a v a 2s. c o m String parentName = null; if (moduleStagingDirectory == null) { MavenProject parent = project.getParent(); if (parent != null) { parentName = parent.getName(); final String parentOutputDirectory = parent.getBuild().getDirectory(); if (parentOutputDirectory != null) { moduleStagingDirectory = parentOutputDirectory + "/staging"; } } } if (moduleStagingDirectory == null) { throw new MojoExecutionException("Unable to determine module staging directory for project '" + project.getName() + "'" + (parentName != null ? " from project parent '" + parentName + "'" : " with no project parent") + ". Please use 'moduleStagingDirectory' configuration parameter to specify this."); } log.info("Using module staging directory: " + moduleStagingDirectory); return moduleStagingDirectory; }
From source file:org.impalaframework.maven.plugin.StageModuleMojo.java
License:Apache License
public void execute() throws MojoExecutionException { final Log log = getLog(); boolean isImpalaModule = isImpalaModule(); if (isImpalaModule) { //copying module to staging directory moduleStagingDirectory = MojoUtils.getModuleStagingDirectory(getLog(), project, moduleStagingDirectory); final File file = project.getArtifact().getFile(); if (log.isInfoEnabled()) { log.info("Copying file " + file.getAbsolutePath() + " to module staging directory: " + moduleStagingDirectory); }//from w ww .j av a2 s . c o m final File targetDirectory = new File(moduleStagingDirectory); try { FileUtils.forceMkdir(targetDirectory); } catch (IOException e) { throw new MojoExecutionException(e.getMessage(), e); } MojoUtils.copyFile(file, targetDirectory, file.getName()); } }