List of usage examples for org.apache.maven.plugin.MojoExecutionException's constructor
public MojoExecutionException(String message)
which constructs a MojoExecutionException exception providing a message.
From source file: com.clearstorydata.maven.plugins.shadediff.mojo.ShadeDiffMojo.java
License:Apache License
public void execute() throws MojoExecutionException { try {/* ww w. jav a 2s . c o m*/ Plugin shadePlugin = lookupPlugin("org.apache.maven.plugins:maven-shade-plugin"); if (shadePlugin == null) { getLog().info("maven-shade-plugin not found, skipping shade-diff execution"); return; } if (excludeShadedJars == null) { getLog().info( "No shaded jars specified to exclude the contents of, skipping " + "shade-diff execution"); return; } Map<String, String> idToVersion = new HashMap<String, String>(); for (Artifact artifact : project.getArtifacts()) { idToVersion.put(getIdWithoutVersion(artifact), artifact.getVersion()); } Set<String> excludes = new TreeSet<String>(); for (ShadedJarExclusion excludedShadedJar : excludeShadedJars) { ArtifactResolutionResult resolutionResult = resolveShadedJarToExclude(excludedShadedJar); if (resolutionResult.getArtifacts().isEmpty()) { throw new MojoExecutionException("Could not resolve shaded jar artifact to exclude: " + "groupId=" + excludedShadedJar.getGroupId() + ", " + "artifactId=" + excludedShadedJar.getArtifactId() + ", " + "version=" + excludedShadedJar.getVersion() + ", " + "classifier=" + excludedShadedJar.getClassifier()); } for (Artifact excludedShadedJarArtifact : resolutionResult.getArtifacts()) { ZipFile zip = new ZipFile(excludedShadedJarArtifact.getFile().getPath()); ZipEntry entry = zip.getEntry(SHADED_JAR_CONTENTS_ENTRY); if (entry != null) { BufferedReader reader = new BufferedReader( new InputStreamReader(zip.getInputStream(entry))); String line; while ((line = reader.readLine()) != null) { String[] items = line.split(":"); if (items.length < 4 || items.length > 5) { getLog().warn( "Invalid full artifact ID line from " + excludedShadedJarArtifact.getId() + "'s list of " + "included jars, skipping: " + line); continue; } String groupId = items[0]; String artifactId = items[1]; String type = items[2]; String classifier = items.length == 5 ? 
items[3] : ""; String version = items[items.length - 1]; Artifact shadedJarDep = factory.createArtifactWithClassifier(groupId, artifactId, version, type, classifier); String groupArtifactType = getIdWithoutVersion(shadedJarDep); String projectDepVersion = idToVersion.get(groupArtifactType); if (projectDepVersion != null && shadedJarDep.getVersion().equals(projectDepVersion)) { String exclude = shadedJarDep.getGroupId() + ":" + shadedJarDep.getArtifactId() + ":*"; if (!excludes.contains(exclude)) { excludes.add(exclude); getLog().info("Excluding from shaded jar: " + exclude + " (already included in " + excludedShadedJarArtifact.getId() + ")"); } } } } else { // We make this a build failure, because this indicates that the shaded jar was not // built correctly. throw new MojoExecutionException("No contents entry " + SHADED_JAR_CONTENTS_ENTRY + " found in " + excludedShadedJarArtifact.getFile().getPath()); } } } if (!excludes.isEmpty()) { String joinedExcludes = Joiner.on(",").join(excludes); project.getProperties().setProperty("maven.shade.plugin.additionalExcludes", joinedExcludes); } } catch (IOException ex) { getLog().error(ex); throw new MojoExecutionException("IOException", ex); } }
From source file:com.clearstorydata.maven.plugins.shadediff.mojo.ShadeDiffMojo.java
License:Apache License
private ArtifactResolutionResult resolveShadedJarToExclude(ShadedJarExclusion excludedShadedJar) throws MojoExecutionException { Artifact excludedShadedJarArtifact = this.factory.createArtifactWithClassifier( excludedShadedJar.getGroupId(), excludedShadedJar.getArtifactId(), excludedShadedJar.getVersion(), "jar", excludedShadedJar.getClassifier() == null ? "" : excludedShadedJar.getClassifier()); ArtifactResolutionRequest request = new ArtifactResolutionRequest(); request.setArtifact(excludedShadedJarArtifact); request.setRemoteRepositories(remoteRepositories); request.setLocalRepository(localRepository); ArtifactResolutionResult result = artifactResolver.resolve(request); for (Exception ex : result.getExceptions()) { getLog().error(ex);/* www. j a v a2 s. co m*/ } if (result.hasExceptions()) { throw new MojoExecutionException("Artifact resolution failed"); } return result; }
From source file:com.cloudera.cdk.maven.plugins.CreateDatasetMojo.java
License:Apache License
@Override public void execute() throws MojoExecutionException, MojoFailureException { if (avroSchemaFile == null && avroSchemaReflectClass == null) { throw new IllegalArgumentException( "One of cdk.avroSchemaFile or " + "cdk.avroSchemaReflectClass must be specified"); }/*w w w. java2 s. c o m*/ DatasetRepository repo = getDatasetRepository(); DatasetDescriptor.Builder descriptorBuilder = new DatasetDescriptor.Builder(); configureSchema(descriptorBuilder, avroSchemaFile, avroSchemaReflectClass); if (format.equals(Formats.AVRO.getName())) { descriptorBuilder.format(Formats.AVRO); } else if (format.equals(Formats.PARQUET.getName())) { descriptorBuilder.format(Formats.PARQUET); } else { throw new MojoExecutionException("Unrecognized format: " + format); } if (partitionExpression != null) { descriptorBuilder.partitionStrategy(Accessor.getDefault().fromExpression(partitionExpression)); } repo.create(datasetName, descriptorBuilder.build()); }
From source file:com.cloudera.cdk.maven.plugins.DeployAppMojo.java
License:Apache License
public void execute() throws MojoExecutionException, MojoFailureException { try {//from w w w. ja va2 s . c o m Configuration conf = new Configuration(); Path appPath = getAppPath(); getLog().info("Deploying " + localApplicationFile + " to " + appPath); FileSystem destFileSystem = FileSystem.get(new URI(deployFileSystem), conf); if (destFileSystem.exists(appPath)) { if (!updateApplication) { throw new MojoExecutionException("Application already exists at " + appPath + ". Use 'updateApplication' option to force deployment."); } boolean success = destFileSystem.delete(appPath, true); if (!success) { throw new MojoExecutionException("Error deleting existing application at " + appPath); } } boolean success = FileUtil.copy(localApplicationFile, destFileSystem, appPath, false, conf); if (!success) { throw new MojoExecutionException( "Error creating parent directories " + "for deploying Oozie application"); } } catch (URISyntaxException e) { throw new MojoExecutionException("Syntax error in 'deployFileSystem': " + deployFileSystem, e); } catch (IOException e) { throw new MojoExecutionException("Error deploying application", e); } }
From source file:com.cloudera.cdk.maven.plugins.RunAppMojo.java
License:Apache License
public void execute() throws MojoExecutionException, MojoFailureException { OozieClient oozieClient = new OozieClient(oozieUrl); Properties conf = oozieClient.createConfiguration(); if (jobProperties != null) { conf.putAll(jobProperties);// w w w . j a va 2s . co m } if (hadoopConfiguration != null) { conf.putAll(hadoopConfiguration); String hadoopFs = hadoopConfiguration.getProperty("fs.default.name"); if (hadoopFs == null) { throw new MojoExecutionException("Missing property 'fs.default.name' in " + "hadoopConfiguration"); } String hadoopJobTracker = hadoopConfiguration.getProperty("mapred.job.tracker"); if (hadoopJobTracker == null) { throw new MojoExecutionException( "Missing property 'mapred.job.tracker' in " + "hadoopConfiguration"); } conf.put(NAMENODE_PROPERTY, hadoopFs); conf.put(JOBTRACKER_PROPERTY, hadoopJobTracker); } String appPath = getAppPath().toString(); conf.setProperty(getAppPathPropertyName(), appPath); conf.setProperty(APP_PATH_PROPERTY, appPath); // used in coordinator.xml getLog().info("App path: " + appPath); try { String jobId = oozieClient.run(conf); getLog().info("Running Oozie job " + jobId); } catch (OozieClientException e) { throw new MojoExecutionException("Error running Oozie job", e); } }
From source file:com.cloudera.cdk.maven.plugins.RunToolMojo.java
License:Apache License
public void execute() throws MojoExecutionException, MojoFailureException { List<String> libJars = new ArrayList<String>(); List<URL> classpath = new ArrayList<URL>(); File mainArtifactFile = new File(mavenProject.getBuild().getDirectory(), mavenProject.getBuild().getFinalName() + ".jar"); if (!mainArtifactFile.exists()) { throw new MojoExecutionException("Main artifact missing: " + mainArtifactFile); }/* ww w . ja v a 2s. c o m*/ libJars.add(mainArtifactFile.toString()); classpath.add(toURL(mainArtifactFile)); for (Object a : mavenProject.getRuntimeArtifacts()) { File file = ((Artifact) a).getFile(); classpath.add(toURL(file)); libJars.add(file.toString()); } final List<String> commandArgs = new ArrayList<String>(); for (String key : hadoopConfiguration.stringPropertyNames()) { String value = hadoopConfiguration.getProperty(key); commandArgs.add("-D"); commandArgs.add(key + "=" + value); } if (addDependenciesToDistributedCache) { commandArgs.add("-libjars"); commandArgs.add(Joiner.on(',').join(libJars)); } if (args != null) { for (String arg : args) { commandArgs.add(arg); } } getLog().debug("Running tool with args: " + commandArgs); getLog().debug("Running tool with classpath: " + classpath); Thread executionThread = new Thread() { @Override public void run() { try { Method main = Thread.currentThread().getContextClassLoader().loadClass(toolClass) .getMethod("main", new Class[] { String[].class }); main.invoke(null, new Object[] { commandArgs.toArray(new String[commandArgs.size()]) }); } catch (Exception e) { Thread.currentThread().getThreadGroup().uncaughtException(Thread.currentThread(), e); } } }; ClassLoader parentClassLoader = getClass().getClassLoader(); // use Maven's classloader, not the system one ClassLoader classLoader = new URLClassLoader(classpath.toArray(new URL[classpath.size()]), parentClassLoader); executionThread.setContextClassLoader(classLoader); executionThread.start(); try { executionThread.join(); } catch (InterruptedException e) { 
Thread.currentThread().interrupt(); getLog().warn("interrupted while joining against thread " + executionThread, e); } }
From source file:com.codecrate.webstart.DeployJnlpMojo.java
License:Apache License
public void execute() throws MojoExecutionException { if (!workDirectory.exists() && !workDirectory.mkdirs()) { throw new MojoExecutionException("Unable to setup JNLP work directory: " + workDirectory); }/* w w w. ja v a 2 s .c o m*/ getLog().info("Deploying JNLP files from " + workDirectory + " to " + url); Repository repo = new Repository(id, url); Wagon wagon = null; try { wagon = getWagon(repo); wagon.connect(repo, wagonManager.getAuthenticationInfo(id)); if (!wagon.supportsDirectoryCopy()) { getLog().warn("Unable to copy directories using the specified protocol: " + url); getLog().info("Attempting to transfer individual files to remote server."); File[] files = workDirectory.listFiles(); for (int x = 0; x < files.length; x++) { File file = files[x]; getLog().info("Transfering file: " + file.getName()); wagon.put(file, file.getName()); } } else { wagon.putDirectory(workDirectory, "."); } } catch (Exception e) { throw new IllegalArgumentException("Unable to deploy JNLP files.", e); } finally { if (wagon != null) { try { wagon.disconnect(); } catch (ConnectionException e) { getLog().warn("Error disconnecting from repo: " + repo, e); } } } }
From source file:com.codecrate.webstart.GenerateJnlpMojo.java
License:Apache License
public void execute() throws MojoExecutionException { if (!workDirectory.exists() && !workDirectory.mkdirs()) { throw new MojoExecutionException("Unable to setup JNLP work directory: " + workDirectory); }/*from w w w .j ava 2 s . co m*/ if (allPermissions) { jarProcessor = new SigningJarProcessor(); } else { jarProcessor = new SimpleFileCopyJarProcessor(); } createJnlpFile(createWriter()); createDistribution(); }
From source file:com.codecrate.webstart.GenerateJnlpMojo.java
License:Apache License
private BufferedWriter createWriter() throws MojoExecutionException { File file = new File(workDirectory, jnlpFile); getLog().info("Creating JNLP File: " + file); try {//from w w w . j a va 2s . c o m return new BufferedWriter(new FileWriter(file)); } catch (IOException e) { throw new MojoExecutionException("Unable to write to: " + file); } }
From source file:com.codedpoetry.maven.dockerplugin.BuildMojo.java
License:Apache License
public void execute() throws MojoExecutionException { try {/*from ww w. j a v a2s . co m*/ String templateUrl = dockerfilePath + "/Dockerfile.vm"; File dockerfilesDirectory = new File(templateUrl); if (!dockerfilesDirectory.exists() || dockerfilesDirectory.isDirectory()) { throw new MojoExecutionException("No Dockerfile found at " + dockerfilePath); } if (Strings.isNullOrEmpty(imageName)) { throw new MojoExecutionException("imageName cannot be null nor empty"); } Map context = new HashMap<>(); context.put("project", project); StringWriter sw = templateRenderer.renderTemplate(templateUrl, context); File dockerfile = fileCreator.createFile(outputDirectory, "Dockerfile", sw.toString()); final DockerClient docker = DefaultDockerClient.fromEnv().build(); Path dockerfiles = outputDirectory.toPath(); String imageAndTag = contatImageNameAndTag(); this.getLog().info("Building image '" + imageName + "' with tag '" + imageTag + "'"); docker.build(dockerfiles, imageAndTag); } catch (Exception e) { throw new MojoExecutionException(e.getMessage(), e); } }