List of usage examples for com.amazonaws.services.codepipeline.model S3ArtifactLocation getBucketName
public String getBucketName()
The name of the Amazon S3 bucket.
From source file: jetbrains.buildServer.codepipeline.CodePipelineBuildListener.java
License: Apache License
/**
 * Resolves the AWS CodePipeline job associated with this build (if any) and downloads
 * the job's input artifacts from S3 into the configured input folder.
 * <p>
 * Runs at most once per listener instance (guarded by {@code myJobInputProcessed}).
 * If no job id is present in the build's shared config parameters, the method is a no-op.
 * Any failure during client creation, job lookup, or artifact download is routed to
 * {@code failOnException}, which receives the (possibly null) pipeline client.
 *
 * @param build the running build whose shared config parameters carry the job id,
 *              AWS credentials/region, and artifact folder locations
 */
private void processJobInput(@NotNull final AgentRunningBuild build) {
    // Guard: input processing must happen only once per build.
    if (myJobInputProcessed)
        return;
    myJobInputProcessed = true;

    final Map<String, String> params = build.getSharedConfigParameters();
    myJobID = getJobId(params);
    if (myJobID == null) {
        // Not a CodePipeline-triggered build; nothing to do.
        LOG.debug(msgForBuild("No AWS CodePipeline job found for the build", build));
        return;
    }

    AWSCommonParams.withAWSClients(params, new AWSCommonParams.WithAWSClients<Void, RuntimeException>() {
        @Nullable
        @Override
        public Void run(@NotNull AWSClients clients) throws RuntimeException {
            // Kept outside the try so the catch can pass it (possibly null) to failOnException.
            AWSCodePipelineClient codePipelineClient = null;
            try {
                codePipelineClient = clients.createCodePipeLineClient();
                final JobData jobData = getJobData(codePipelineClient, params);
                final PipelineContext pipelineContext = jobData.getPipelineContext();

                // Surface pipeline/stage/action/job identity in the build log, with a console link.
                build.getBuildLogger().message("This build is a part of an AWS CodePipeline pipeline: "
                        + pipelineContext.getPipelineName()
                        + "\nLink: https://console.aws.amazon.com/codepipeline/home?region="
                        + params.get(AWSCommonParams.REGION_NAME_PARAM) + "#/view/"
                        + pipelineContext.getPipelineName()
                        + "\nStage: " + pipelineContext.getStage().getName()
                        + "\nAction: " + pipelineContext.getAction().getName()
                        + "\nJob ID: " + myJobID);

                final List<Artifact> inputArtifacts = jobData.getInputArtifacts();
                if (inputArtifacts.isEmpty()) {
                    LOG.debug(
                            msgForBuild("No input artifacts provided for the job with ID: " + myJobID, build));
                } else {
                    final File inputFolder = new File(params.get(ARTIFACT_INPUT_FOLDER_CONFIG_PARAM));
                    FileUtil.createDir(inputFolder);

                    // Download every input artifact from its S3 location into the input folder,
                    // using the job-scoped artifact credentials (not the pipeline-client credentials).
                    final Collection<Download> downloads = S3Util.withTransferManager(
                            getArtifactS3Client(jobData.getArtifactCredentials(), params),
                            new S3Util.WithTransferManager<Download>() {
                                @NotNull
                                @Override
                                public Collection<Download> run(@NotNull final TransferManager manager)
                                        throws Throwable {
                                    return CollectionsUtil.convertCollection(inputArtifacts,
                                            new Converter<Download, Artifact>() {
                                                @Override
                                                public Download createFrom(@NotNull Artifact artifact) {
                                                    final S3ArtifactLocation s3Location =
                                                            artifact.getLocation().getS3Location();
                                                    final File destinationFile = getInputArtifactFile(
                                                            inputFolder, s3Location.getObjectKey());
                                                    build.getBuildLogger()
                                                            .message("Downloading job input artifact "
                                                                    + s3Location.getObjectKey() + " to "
                                                                    + destinationFile.getAbsolutePath());
                                                    // NOTE(review): starts an async TransferManager download;
                                                    // presumably S3Util.withTransferManager waits for
                                                    // completion before returning — confirm.
                                                    return manager.download(s3Location.getBucketName(),
                                                            s3Location.getObjectKey(), destinationFile);
                                                }
                                            });
                                }
                            });

                    // for backward compatibility, TW-47902
                    for (Download d : downloads) {
                        makeArtifactCopy(inputFolder, getInputArtifactFile(inputFolder, d.getKey()),
                                d.getKey(), build);
                    }

                    // Pre-create the output folder only when the job expects output artifacts.
                    if (!jobData.getOutputArtifacts().isEmpty()) {
                        FileUtil.createDir(new File(params.get(ARTIFACT_OUTPUT_FOLDER_CONFIG_PARAM)));
                    }
                }
            } catch (Throwable e) {
                // Delegates failure reporting (including notifying CodePipeline when possible).
                failOnException(codePipelineClient, build, e);
            }
            return null;
        }
    });
}
From source file: jetbrains.buildServer.codepipeline.CodePipelineBuildListener.java
License: Apache License
/**
 * Reports the build result back to AWS CodePipeline when the build finishes.
 * <p>
 * Behavior, as visible here:
 * <ul>
 *   <li>No-op when no CodePipeline job was detected ({@code myJobID == null}).</li>
 *   <li>Failed or interrupted builds publish a job failure with a short reason string.</li>
 *   <li>Otherwise, each expected output artifact is uploaded to its S3 location (with
 *       SSE-KMS parameters derived from the job's encryption key) and then
 *       {@code publishJobSuccess} is called.</li>
 * </ul>
 * NOTE(review): {@code publishJobSuccess} is only invoked on the non-empty-artifacts path;
 * a successful build whose job declares no output artifacts is never acknowledged as
 * succeeded here — confirm this is intentional.
 *
 * @param build       the finished build; its shared config parameters supply AWS settings
 *                    and the artifact output folder
 * @param buildStatus final build status, used to detect interruption
 */
private void processJobOutput(@NotNull final AgentRunningBuild build,
        @NotNull final BuildFinishedStatus buildStatus) {
    if (myJobID == null)
        return;
    AWSCommonParams.withAWSClients(build.getSharedConfigParameters(),
            new AWSCommonParams.WithAWSClients<Void, RuntimeException>() {
                @Nullable
                @Override
                public Void run(@NotNull AWSClients clients) throws RuntimeException {
                    // Kept outside the try so the catch can pass it (possibly null) to failOnException.
                    AWSCodePipelineClient codePipelineClient = null;
                    try {
                        codePipelineClient = clients.createCodePipeLineClient();
                        if (build.isBuildFailingOnServer()) {
                            publishJobFailure(codePipelineClient, build, "Build failed");
                        } else if (BuildFinishedStatus.INTERRUPTED == buildStatus) {
                            publishJobFailure(codePipelineClient, build, "Build interrupted");
                        } else {
                            final Map<String, String> params = build.getSharedConfigParameters();
                            final JobData jobData = getJobData(codePipelineClient, params);
                            final List<Artifact> outputArtifacts = jobData.getOutputArtifacts();
                            if (outputArtifacts.isEmpty()) {
                                LOG.debug(msgForBuild(
                                        "No output artifacts expected for the job with ID: " + myJobID,
                                        build));
                            } else {
                                final File artifactOutputFolder = new File(
                                        params.get(ARTIFACT_OUTPUT_FOLDER_CONFIG_PARAM));
                                // Upload every expected output artifact using the job-scoped
                                // artifact credentials, applying SSE-KMS from the job's key.
                                S3Util.withTransferManager(
                                        getArtifactS3Client(jobData.getArtifactCredentials(), params),
                                        new S3Util.WithTransferManager<Upload>() {
                                            @NotNull
                                            @Override
                                            public Collection<Upload> run(
                                                    @NotNull final TransferManager manager)
                                                    throws Throwable {
                                                return CollectionsUtil.convertCollection(outputArtifacts,
                                                        new Converter<Upload, Artifact>() {
                                                            @Override
                                                            public Upload createFrom(
                                                                    @NotNull Artifact artifact) {
                                                                // Locate the produced file for this
                                                                // artifact in the output folder.
                                                                final File buildArtifact = getBuildArtifact(
                                                                        artifact,
                                                                        jobData.getPipelineContext()
                                                                                .getPipelineName(),
                                                                        artifactOutputFolder, build);
                                                                final S3ArtifactLocation s3Location = artifact
                                                                        .getLocation().getS3Location();
                                                                build.getBuildLogger().message(
                                                                        "Uploading job output artifact "
                                                                                + s3Location.getObjectKey()
                                                                                + " from "
                                                                                + buildArtifact
                                                                                        .getAbsolutePath());
                                                                return manager.upload(new PutObjectRequest(
                                                                        s3Location.getBucketName(),
                                                                        s3Location.getObjectKey(),
                                                                        buildArtifact)
                                                                                .withSSEAwsKeyManagementParams(
                                                                                        getSSEAwsKeyManagementParams(
                                                                                                jobData.getEncryptionKey())));
                                                            }
                                                        });
                                            }
                                        });
                                publishJobSuccess(codePipelineClient, build);
                            }
                        }
                    } catch (Throwable e) {
                        // Delegates failure reporting (including notifying CodePipeline when possible).
                        failOnException(codePipelineClient, build, e);
                    }
                    return null;
                }
            });
}