List of usage examples for org.eclipse.jgit.api Git rebase
public RebaseCommand rebase()
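Before the project-specific excerpts, here is a minimal, hedged sketch of the basic rebase() call pattern: rebase the currently checked-out branch onto an upstream ref and abort if the rebase leaves the repository in an unsafe state. The repository path and the upstream ref name are illustrative assumptions only, not taken from any of the projects listed below.

import java.io.File;

import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.RebaseCommand;
import org.eclipse.jgit.api.RebaseResult;
import org.eclipse.jgit.lib.RepositoryState;

public class RebaseSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical local clone; point this at an existing repository.
        try (Git git = Git.open(new File("/tmp/my-repo"))) {
            // Rebase the checked-out branch onto a remote-tracking branch (assumed ref name).
            RebaseResult result = git.rebase()
                    .setUpstream("refs/remotes/origin/master")
                    .call();
            if (!result.getStatus().isSuccessful()
                    && git.getRepository().getRepositoryState() != RepositoryState.SAFE) {
                // Roll back to the pre-rebase state when the rebase stopped on conflicts.
                git.rebase().setOperation(RebaseCommand.Operation.ABORT).call();
            }
        }
    }
}

The same setUpstream/Operation.ABORT pairing appears in several of the excerpts below, for example in de.blizzy.documentr.page.PageStore and org.jabylon.team.git.GitTeamProvider.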
From source file:com.maiereni.sling.sources.git.GitProjectDownloader.java
License:Apache License
private void update(final Git git) throws Exception {
    logger.debug("Fetch from remote");
    FetchResult fr = git.fetch().call(); // .setRemote(url)
    Collection<Ref> refs = fr.getAdvertisedRefs();
    Iterable<RevCommit> logs = git.log().call();
    for (RevCommit rev : logs) {
        PersonIdent authorIdent = rev.getAuthorIdent();
        Date date = authorIdent.getWhen();
        String authName = authorIdent.getName();
        logger.debug("Commit at " + SDF.format(date) + " by " + authName + ": " + rev.getId().name()
                + " text: " + rev.getShortMessage());
    }
    List<Note> notes = git.notesList().call();
    for (Ref ref : refs) {
        if (ref.getName().equals("HEAD")) {
            git.rebase().setUpstream(ref.getObjectId()).call();
            logger.debug("Rebase on HEAD");
            for (Note note : notes) {
                if (note.getName().equals(ref.getObjectId().getName())) {
                    logger.debug("Found note: " + note + " for commit " + ref.getName());
                    // displaying the contents of the note is done via a simple blob-read
                    ObjectLoader loader = git.getRepository().open(note.getData());
                    loader.copyTo(System.out);
                }
            }
        }
    }
}
From source file:com.maiereni.synchronizer.git.utils.GitDownloaderImpl.java
License:Apache License
private List<Change> update(final Git git, final GitProperties properties, final Ref localBranch,
        final Ref tagRef) throws Exception {
    logger.debug("Fetch from remote");
    List<Change> ret = null;
    FetchCommand cmd = git.fetch();
    if (StringUtils.isNotBlank(properties.getUserName()) && StringUtils.isNotBlank(properties.getPassword())) {
        logger.debug("Set credentials");
        cmd.setCredentialsProvider(
                new UsernamePasswordCredentialsProvider(properties.getUserName(), properties.getPassword()));
    }
    if (tagRef != null) {
        RefSpec spec = new RefSpec().setSourceDestination(localBranch.getName(), tagRef.getName());
        List<RefSpec> specs = new ArrayList<RefSpec>();
        specs.add(spec);
        cmd.setRefSpecs(specs);
    }
    FetchResult fr = cmd.call();
    Collection<Ref> refs = fr.getAdvertisedRefs();
    for (Ref ref : refs) {
        if (ref.getName().equals("HEAD")) {
            ret = checkDifferences(git, localBranch, ref);
            logger.debug("Rebase on HEAD");
            RebaseResult rebaseResult = git.rebase().setUpstream(ref.getObjectId()).call();
            if (rebaseResult.getStatus().isSuccessful()) {
            }
        }
    }
    return ret;
}
From source file:de.blizzy.documentr.page.PageStore.java
License:Open Source License
private MergeConflict savePageInternal(String projectName, String branchName, String path, String suffix,
        Page page, String baseCommit, String rootDir, User user, ILockedRepository repo, boolean push)
        throws IOException, GitAPIException {

    Git git = Git.wrap(repo.r());
    String headCommit = CommitUtils.getHead(repo.r()).getName();
    if ((baseCommit != null) && headCommit.equals(baseCommit)) {
        baseCommit = null;
    }

    String editBranchName = "_edit_" + String.valueOf((long) (Math.random() * Long.MAX_VALUE)); //$NON-NLS-1$
    if (baseCommit != null) {
        git.branchCreate().setName(editBranchName).setStartPoint(baseCommit).call();
        git.checkout().setName(editBranchName).call();
    }

    Map<String, Object> metaMap = new HashMap<String, Object>();
    metaMap.put(TITLE, page.getTitle());
    metaMap.put(CONTENT_TYPE, page.getContentType());
    if (!page.getTags().isEmpty()) {
        metaMap.put(TAGS, page.getTags());
    }
    metaMap.put(VIEW_RESTRICTION_ROLE, page.getViewRestrictionRole());
    Gson gson = new GsonBuilder().enableComplexMapKeySerialization().create();
    String json = gson.toJson(metaMap);
    File workingDir = RepositoryUtil.getWorkingDir(repo.r());
    File pagesDir = new File(workingDir, rootDir);
    File workingFile = Util.toFile(pagesDir, path + DocumentrConstants.META_SUFFIX);
    FileUtils.write(workingFile, json, Charsets.UTF_8);

    PageData pageData = page.getData();
    if (pageData != null) {
        workingFile = Util.toFile(pagesDir, path + suffix);
        FileUtils.writeByteArrayToFile(workingFile, pageData.getData());
    }

    AddCommand addCommand = git.add().addFilepattern(rootDir + "/" + path + DocumentrConstants.META_SUFFIX); //$NON-NLS-1$
    if (pageData != null) {
        addCommand.addFilepattern(rootDir + "/" + path + suffix); //$NON-NLS-1$
    }
    addCommand.call();
    PersonIdent ident = new PersonIdent(user.getLoginName(), user.getEmail());
    git.commit().setAuthor(ident).setCommitter(ident).setMessage(rootDir + "/" + path + suffix).call(); //$NON-NLS-1$

    MergeConflict conflict = null;
    if (baseCommit != null) {
        git.rebase().setUpstream(branchName).call();
        if (repo.r().getRepositoryState() != RepositoryState.SAFE) {
            String text = FileUtils.readFileToString(workingFile, Charsets.UTF_8);
            conflict = new MergeConflict(text, headCommit);
            git.rebase().setOperation(RebaseCommand.Operation.ABORT).call();
        }
        git.checkout().setName(branchName).call();
        if (conflict == null) {
            git.merge().include(repo.r().resolve(editBranchName)).call();
        }
        git.branchDelete().setBranchNames(editBranchName).setForce(true).call();
    }

    if (push && (conflict == null)) {
        git.push().call();
    }

    page.setParentPagePath(getParentPagePath(path, repo.r()));

    if (conflict == null) {
        PageUtil.updateProjectEditTime(projectName);
    }
    return conflict;
}
From source file:org.eclipse.orion.server.git.servlets.GitCommitHandlerV1.java
License:Open Source License
private boolean rebase(HttpServletRequest request, HttpServletResponse response, Repository db,
        String commitToRebase, String rebaseOperation)
        throws ServletException, JSONException, AmbiguousObjectException, IOException {
    JSONObject result = new JSONObject();
    try {
        Git git = new Git(db);
        RebaseCommand rebase = git.rebase();
        Operation operation;
        if (rebaseOperation != null) {
            operation = Operation.valueOf(rebaseOperation);
        } else {
            operation = Operation.BEGIN;
        }
        if (commitToRebase != null && !commitToRebase.isEmpty()) {
            ObjectId objectId = db.resolve(commitToRebase);
            rebase.setUpstream(objectId);
        } else if (operation.equals(Operation.BEGIN)) {
            return statusHandler.handleRequest(request, response, new ServerStatus(IStatus.ERROR,
                    HttpServletResponse.SC_BAD_REQUEST, "Missing commit refId.", null));
        }
        rebase.setOperation(operation);
        RebaseResult rebaseResult = rebase.call();
        result.put(GitConstants.KEY_RESULT, rebaseResult.getStatus().name());
    } catch (UnmergedPathsException e) {
        // this error should be handled by client, so return a proper status
        result.put(GitConstants.KEY_RESULT, AdditionalRebaseStatus.FAILED_UNMERGED_PATHS.name());
    } catch (WrongRepositoryStateException e) {
        // this error should be handled by client, so return a proper status
        result.put(GitConstants.KEY_RESULT, AdditionalRebaseStatus.FAILED_WRONG_REPOSITORY_STATE.name());
    } catch (IllegalArgumentException e) {
        return statusHandler.handleRequest(request, response, new ServerStatus(IStatus.ERROR,
                HttpServletResponse.SC_BAD_REQUEST, "Invalid rebase operation.", e));
    } catch (GitAPIException e) {
        return statusHandler.handleRequest(request, response, new ServerStatus(IStatus.ERROR,
                HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "An error occured when rebasing.", e));
    } catch (JGitInternalException e) {
        // get cause and try to handle
        if (e.getCause() instanceof org.eclipse.jgit.errors.CheckoutConflictException) {
            // this error should be handled by client, so return a proper status
            result.put(GitConstants.KEY_RESULT, AdditionalRebaseStatus.FAILED_PENDING_CHANGES.name());
        } else {
            return statusHandler.handleRequest(request, response, new ServerStatus(IStatus.ERROR,
                    HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "An error occured when rebasing.", e));
        }
    }
    OrionServlet.writeJSONResponse(request, response, result);
    return true;
}
From source file:org.jabylon.team.git.GitTeamProvider.java
License:Open Source License
@Override
public Collection<PropertyFileDiff> update(ProjectVersion project, IProgressMonitor monitor)
        throws TeamProviderException {
    SubMonitor subMon = SubMonitor.convert(monitor, 100);
    List<PropertyFileDiff> updatedFiles = new ArrayList<PropertyFileDiff>();
    try {
        Repository repository = createRepository(project);
        Git git = Git.wrap(repository);
        FetchCommand fetchCommand = git.fetch();
        URI uri = project.getParent().getRepositoryURI();
        if (uri != null)
            fetchCommand.setRemote(stripUserInfo(uri).toString());
        String refspecString = "refs/heads/{0}:refs/remotes/origin/{0}";
        refspecString = MessageFormat.format(refspecString, project.getName());
        RefSpec spec = new RefSpec(refspecString);
        fetchCommand.setRefSpecs(spec);
        subMon.subTask("Fetching from remote");
        if (!"https".equals(uri.scheme()) && !"http".equals(uri.scheme()))
            fetchCommand.setTransportConfigCallback(createTransportConfigCallback(project.getParent()));
        fetchCommand.setCredentialsProvider(createCredentialsProvider(project.getParent()));
        fetchCommand.setProgressMonitor(new ProgressMonitorWrapper(subMon.newChild(80)));
        fetchCommand.call();
        ObjectId remoteHead = repository.resolve("refs/remotes/origin/" + project.getName() + "^{tree}");
        DiffCommand diff = git.diff();
        subMon.subTask("Caculating Diff");
        diff.setProgressMonitor(new ProgressMonitorWrapper(subMon.newChild(20)));
        diff.setOldTree(new FileTreeIterator(repository));
        CanonicalTreeParser p = new CanonicalTreeParser();
        ObjectReader reader = repository.newObjectReader();
        try {
            p.reset(reader, remoteHead);
        } finally {
            reader.release();
        }
        diff.setNewTree(p);
        checkCanceled(subMon);
        List<DiffEntry> diffs = diff.call();
        for (DiffEntry diffEntry : diffs) {
            checkCanceled(subMon);
            updatedFiles.add(convertDiffEntry(diffEntry));
            LOGGER.trace(diffEntry.toString());
        }
        if (!updatedFiles.isEmpty()) {
            checkCanceled(subMon);
            //no more cancel after this point
            ObjectId lastCommitID = repository
                    .resolve("refs/remotes/origin/" + project.getName() + "^{commit}");
            LOGGER.info("Merging remote commit {} to {}/{}",
                    new Object[] { lastCommitID, project.getName(), project.getParent().getName() });
            //TODO: use rebase here?
            if (isRebase(project)) {
                RebaseCommand rebase = git.rebase();
                rebase.setUpstream("refs/remotes/origin/" + project.getName());
                RebaseResult result = rebase.call();
                if (result.getStatus().isSuccessful()) {
                    LOGGER.info("Rebase finished: {}", result.getStatus());
                } else {
                    LOGGER.error("Rebase of {} failed. Attempting abort", project.relativePath());
                    rebase = git.rebase();
                    rebase.setOperation(Operation.ABORT);
                    result = rebase.call();
                    LOGGER.error("Abort finished with {}", result.getStatus());
                }
            } else {
                MergeCommand merge = git.merge();
                merge.include(lastCommitID);
                MergeResult mergeResult = merge.call();
                LOGGER.info("Merge finished: {}", mergeResult.getMergeStatus());
            }
        } else
            LOGGER.info("Update finished successfully. Nothing to merge, already up to date");
    } catch (JGitInternalException e) {
        throw new TeamProviderException(e);
    } catch (InvalidRemoteException e) {
        throw new TeamProviderException(e);
    } catch (GitAPIException e) {
        throw new TeamProviderException(e);
    } catch (AmbiguousObjectException e) {
        throw new TeamProviderException(e);
    } catch (IOException e) {
        throw new TeamProviderException(e);
    } finally {
        monitor.done();
    }
    return updatedFiles;
}
From source file:org.kie.workbench.common.project.migration.cli.maven.PomEditorWithGitTest.java
License:Apache License
@Test
public void testPomEditor() throws Exception {
    final String repoName = "myrepoxxxx";
    HashMap<String, Object> env = new HashMap<>();
    env.put("init", Boolean.TRUE);
    env.put("internal", Boolean.TRUE);
    final JGitFileSystem fs = (JGitFileSystem) ioService.newFileSystem(URI.create("git://" + repoName), env);

    ioService.startBatch(fs);
    ioService.write(fs.getPath("/pom.xml"), new String(
            java.nio.file.Files.readAllBytes(new File("src/test/projects/generic/pom.xml").toPath())));
    ioService.endBatch();

    Path tmpCloned = Files.createTempDirectory("cloned");
    final File gitClonedFolder = new File(tmpCloned.toFile(), ".clone.git");
    final Git cloned = Git.cloneRepository()
            .setURI(fs.getGit().getRepository().getDirectory().toURI().toString()).setBare(false)
            .setDirectory(gitClonedFolder).call();
    assertNotNull(cloned);

    Path pomPath = Paths.get("file://" + gitClonedFolder.toString() + "/pom.xml");
    byte[] encoded = Files.readAllBytes(pomPath);
    String pomOriginal = new String(encoded, StandardCharsets.UTF_8);

    Model model = editor.updatePom(pomPath, cdiWrapper);
    assertNotNull(model);

    PullCommand pc = cloned.pull().setRemote("origin").setRebase(Boolean.TRUE);
    PullResult pullRes = pc.call();
    assertEquals(pullRes.getRebaseResult().getStatus(), RebaseResult.Status.UP_TO_DATE);

    RebaseCommand rb = cloned.rebase().setUpstream("origin/master");
    RebaseResult rbResult = rb.setPreserveMerges(true).call();
    assertTrue(rbResult.getStatus().isSuccessful());

    pomPath = Paths.get("file://" + gitClonedFolder.toString() + "/pom.xml");
    encoded = Files.readAllBytes(pomPath);
    String pomUpdated = new String(encoded, StandardCharsets.UTF_8);
    assertFalse(pomOriginal.equals(pomUpdated));
}
From source file:org.kie.workbench.common.services.backend.compiler.DefaultMavenCompilerTest.java
License:Apache License
@Test
public void buildWithPullRebaseUberfireTest() throws Exception {
    //Setup origin in memory
    final URI originRepo = URI.create("git://repo");
    final JGitFileSystem origin = (JGitFileSystem) ioService.newFileSystem(originRepo,
            new HashMap<String, Object>() {
                {
                    put("init", Boolean.TRUE);
                    put("internal", Boolean.TRUE);
                    put("listMode", "ALL");
                }
            });
    ioService.startBatch(origin);
    ioService.write(origin.getPath("/pom.xml"), new String(java.nio.file.Files
            .readAllBytes(new File("src/test/projects/dummy_multimodule_untouched/pom.xml").toPath())));
    ioService.write(origin.getPath("/dummyA/src/main/java/dummy/DummyA.java"),
            new String(java.nio.file.Files.readAllBytes(new File(
                    "src/test/projects/dummy_multimodule_untouched/dummyA/src/main/java/dummy/DummyA.java")
                            .toPath())));
    ioService.write(origin.getPath("/dummyB/src/main/java/dummy/DummyB.java"),
            new String(java.nio.file.Files.readAllBytes(new File(
                    "src/test/projects/dummy_multimodule_untouched/dummyB/src/main/java/dummy/DummyB.java")
                            .toPath())));
    ioService.write(origin.getPath("/dummyA/pom.xml"), new String(java.nio.file.Files
            .readAllBytes(new File("src/test/projects/dummy_multimodule_untouched/dummyA/pom.xml").toPath())));
    ioService.write(origin.getPath("/dummyB/pom.xml"), new String(java.nio.file.Files
            .readAllBytes(new File("src/test/projects/dummy_multimodule_untouched/dummyB/pom.xml").toPath())));
    ioService.endBatch();

    // clone into a regularfs
    Path tmpRootCloned = Files.createTempDirectory("cloned");
    Path tmpCloned = Files.createDirectories(Paths.get(tmpRootCloned.toString(), ".clone"));
    final Git cloned = Git.cloneRepository()
            .setURI(origin.getGit().getRepository().getDirectory().toURI().toString()).setBare(false)
            .setDirectory(tmpCloned.toFile()).call();
    assertThat(cloned).isNotNull();

    PullCommand pc = cloned.pull().setRemote("origin").setRebase(Boolean.TRUE);
    PullResult pullRes = pc.call();
    assertThat(pullRes.getRebaseResult().getStatus()).isEqualTo(RebaseResult.Status.UP_TO_DATE); // nothing changed yet

    RebaseCommand rb = cloned.rebase().setUpstream("origin/master");
    RebaseResult rbResult = rb.setPreserveMerges(true).call();
    assertThat(rbResult.getStatus().isSuccessful()).isTrue();

    //Compile the repo
    final AFCompiler compiler = KieMavenCompilerFactory
            .getCompiler(EnumSet.of(KieDecorator.ENABLE_LOGGING, KieDecorator.ENABLE_INCREMENTAL_BUILD));
    byte[] encoded = Files.readAllBytes(Paths.get(tmpCloned + "/pom.xml"));
    String pomAsAstring = new String(encoded, StandardCharsets.UTF_8);
    assertThat(pomAsAstring).doesNotContain(TestConstants.TAKARI_LIFECYCLE_ARTIFACT);

    Path prjFolder = Paths.get(tmpCloned + "/");
    WorkspaceCompilationInfo info = new WorkspaceCompilationInfo(prjFolder);
    CompilationRequest req = new DefaultCompilationRequest(mavenRepoPath, info,
            new String[] { MavenCLIArgs.COMPILE }, Boolean.TRUE);
    CompilationResponse res = compiler.compile(req);
    TestUtil.saveMavenLogIfCompilationResponseNotSuccessfull(tmpCloned, res, this.getClass(), testName);
    assertThat(res.isSuccessful()).isTrue();

    Path incrementalConfiguration = Paths.get(prjFolder + TestConstants.TARGET_TAKARI_PLUGIN);
    assertThat(incrementalConfiguration.toFile()).exists();

    encoded = Files.readAllBytes(Paths.get(prjFolder + "/pom.xml"));
    pomAsAstring = new String(encoded, StandardCharsets.UTF_8);
    assertThat(pomAsAstring).contains(TestConstants.KIE_TAKARI_LIFECYCLE_ARTIFACT);

    TestUtil.rm(tmpRootCloned.toFile());
}
From source file:org.kie.workbench.common.services.backend.compiler.kie.KieDefaultMavenCompilerTest.java
License:Apache License
@Test
public void buildWithPullRebaseUberfireTest() throws Exception {
    //Setup origin in memory
    final URI originRepo = URI.create("git://repo");
    final JGitFileSystem origin = (JGitFileSystem) ioService.newFileSystem(originRepo,
            new HashMap<String, Object>() {
                {
                    put("init", Boolean.TRUE);
                    put("internal", Boolean.TRUE);
                    put("listMode", "ALL");
                }
            });
    ioService.startBatch(origin);
    ioService.write(origin.getPath("/pom.xml"), new String(java.nio.file.Files
            .readAllBytes(new File("src/test/projects/dummy_multimodule_untouched/pom.xml").toPath())));
    ioService.write(origin.getPath("/dummyA/src/main/java/dummy/DummyA.java"),
            new String(java.nio.file.Files.readAllBytes(new File(
                    "src/test/projects/dummy_multimodule_untouched/dummyA/src/main/java/dummy/DummyA.java")
                            .toPath())));
    ioService.write(origin.getPath("/dummyB/src/main/java/dummy/DummyB.java"),
            new String(java.nio.file.Files.readAllBytes(new File(
                    "src/test/projects/dummy_multimodule_untouched/dummyB/src/main/java/dummy/DummyB.java")
                            .toPath())));
    ioService.write(origin.getPath("/dummyA/pom.xml"), new String(java.nio.file.Files
            .readAllBytes(new File("src/test/projects/dummy_multimodule_untouched/dummyA/pom.xml").toPath())));
    ioService.write(origin.getPath("/dummyB/pom.xml"), new String(java.nio.file.Files
            .readAllBytes(new File("src/test/projects/dummy_multimodule_untouched/dummyB/pom.xml").toPath())));
    ioService.endBatch();

    // clone into a regularfs
    Path tmpRootCloned = Files.createTempDirectory("cloned");
    Path tmpCloned = Files.createDirectories(Paths.get(tmpRootCloned.toString(), ".clone"));
    final Git cloned = Git.cloneRepository()
            .setURI(origin.getGit().getRepository().getDirectory().toURI().toString()).setBare(false)
            .setDirectory(tmpCloned.toFile()).call();
    assertThat(cloned).isNotNull();

    PullCommand pc = cloned.pull().setRemote("origin").setRebase(Boolean.TRUE);
    PullResult pullRes = pc.call();
    assertThat(pullRes.getRebaseResult().getStatus()).isEqualTo(RebaseResult.Status.UP_TO_DATE); // nothing changed yet

    RebaseCommand rb = cloned.rebase().setUpstream("origin/master");
    RebaseResult rbResult = rb.setPreserveMerges(true).call();
    assertThat(rbResult.getStatus().isSuccessful()).isTrue();

    //Compile the repo
    byte[] encoded = Files.readAllBytes(Paths.get(tmpCloned + "/pom.xml"));
    String pomAsAstring = new String(encoded, StandardCharsets.UTF_8);
    assertThat(pomAsAstring).doesNotContain(TestConstants.TAKARI_LIFECYCLE_ARTIFACT);

    Path prjFolder = Paths.get(tmpCloned + "/");
    final AFCompiler compiler = KieMavenCompilerFactory
            .getCompiler(EnumSet.of(KieDecorator.ENABLE_LOGGING, KieDecorator.ENABLE_INCREMENTAL_BUILD));
    WorkspaceCompilationInfo info = new WorkspaceCompilationInfo(prjFolder);
    CompilationRequest req = new DefaultCompilationRequest(mavenRepoPath, info,
            new String[] { MavenCLIArgs.COMPILE }, Boolean.TRUE);
    CompilationResponse res = compiler.compile(req);
    TestUtil.saveMavenLogIfCompilationResponseNotSuccessfull(tmpCloned, res, this.getClass(), testName);
    assertThat(res.isSuccessful()).isTrue();

    Path incrementalConfiguration = Paths.get(prjFolder + TestConstants.TARGET_TAKARI_PLUGIN);
    assertThat(incrementalConfiguration.toFile()).exists();

    encoded = Files.readAllBytes(Paths.get(prjFolder + "/pom.xml"));
    pomAsAstring = new String(encoded, StandardCharsets.UTF_8);
    assertThat(pomAsAstring).contains(TestConstants.KIE_TAKARI_LIFECYCLE_ARTIFACT);

    TestUtil.rm(tmpRootCloned.toFile());
}
From source file:org.kie.workbench.common.services.backend.compiler.nio.DefaultMavenCompilerTest.java
License:Apache License
@Test
public void buildWithPullRebaseUberfireTest() throws Exception {
    //Setup origin in memory
    final URI originRepo = URI.create("git://repo");
    final JGitFileSystem origin = (JGitFileSystem) ioService.newFileSystem(originRepo,
            new HashMap<String, Object>() {
                {
                    put("init", Boolean.TRUE);
                    put("internal", Boolean.TRUE);
                    put("listMode", "ALL");
                }
            });
    ioService.startBatch(origin);
    ioService.write(origin.getPath("/dummy/pom.xml"), new String(java.nio.file.Files
            .readAllBytes(new File("src/test/projects/dummy_multimodule_untouched/pom.xml").toPath())));
    ioService.write(origin.getPath("/dummy/dummyA/src/main/java/dummy/DummyA.java"),
            new String(java.nio.file.Files.readAllBytes(new File(
                    "src/test/projects/dummy_multimodule_untouched/dummyA/src/main/java/dummy/DummyA.java")
                            .toPath())));
    ioService.write(origin.getPath("/dummy/dummyB/src/main/java/dummy/DummyB.java"),
            new String(java.nio.file.Files.readAllBytes(new File(
                    "src/test/projects/dummy_multimodule_untouched/dummyB/src/main/java/dummy/DummyB.java")
                            .toPath())));
    ioService.write(origin.getPath("/dummy/dummyA/pom.xml"), new String(java.nio.file.Files
            .readAllBytes(new File("src/test/projects/dummy_multimodule_untouched/dummyA/pom.xml").toPath())));
    ioService.write(origin.getPath("/dummy/dummyB/pom.xml"), new String(java.nio.file.Files
            .readAllBytes(new File("src/test/projects/dummy_multimodule_untouched/dummyB/pom.xml").toPath())));
    ioService.endBatch();

    // clone into a regularfs
    Path tmpRootCloned = Files.createTempDirectory("cloned");
    Path tmpCloned = Files.createDirectories(Paths.get(tmpRootCloned.toString(), ".clone"));
    final Git cloned = Git.cloneRepository().setURI("git://localhost:9418/repo").setBare(false)
            .setDirectory(tmpCloned.toFile()).call();
    assertNotNull(cloned);

    PullCommand pc = cloned.pull().setRemote("origin").setRebase(Boolean.TRUE);
    PullResult pullRes = pc.call();
    assertTrue(pullRes.getRebaseResult().getStatus().equals(RebaseResult.Status.UP_TO_DATE)); // nothing changed yet

    RebaseCommand rb = cloned.rebase().setUpstream("origin/master");
    RebaseResult rbResult = rb.setPreserveMerges(true).call();
    assertTrue(rbResult.getStatus().isSuccessful());

    //Compile the repo
    AFCompiler compiler = MavenCompilerFactory.getCompiler(Decorator.LOG_OUTPUT_AFTER);
    byte[] encoded = Files.readAllBytes(Paths.get(tmpCloned + "/dummy/pom.xml"));
    String pomAsAstring = new String(encoded, StandardCharsets.UTF_8);
    Assert.assertFalse(pomAsAstring.contains("<artifactId>takari-lifecycle-plugin</artifactId>"));

    Path prjFolder = Paths.get(tmpCloned + "/dummy/");
    WorkspaceCompilationInfo info = new WorkspaceCompilationInfo(prjFolder);
    CompilationRequest req = new DefaultCompilationRequest(mavenRepo.toAbsolutePath().toString(), info,
            new String[] { MavenCLIArgs.CLEAN, MavenCLIArgs.COMPILE }, new HashMap<>(), Boolean.TRUE);
    CompilationResponse res = compiler.compileSync(req);
    if (res.getMavenOutput().isPresent() && !res.isSuccessful()) {
        TestUtil.writeMavenOutputIntoTargetFolder(res.getMavenOutput().get(),
                "KieDefaultMavenCompilerOnInMemoryFSTest.buildWithPullRebaseUberfireTest");
    }
    assertTrue(res.isSuccessful());

    Path incrementalConfiguration = Paths.get(
            prjFolder + "/target/incremental/io.takari.maven.plugins_takari-lifecycle-plugin_compile_compile");
    assertTrue(incrementalConfiguration.toFile().exists());

    encoded = Files.readAllBytes(Paths.get(prjFolder + "/pom.xml"));
    pomAsAstring = new String(encoded, StandardCharsets.UTF_8);
    assertTrue(pomAsAstring.contains("<artifactId>takari-lifecycle-plugin</artifactId>"));

    TestUtil.rm(tmpRootCloned.toFile());
}
From source file:org.kie.workbench.common.services.backend.compiler.nio.kie.KieDefaultMavenCompilerTest.java
License:Apache License
@Test
public void buildWithPullRebaseUberfireTest() throws Exception {
    //Setup origin in memory
    final URI originRepo = URI.create("git://repo");
    final JGitFileSystem origin = (JGitFileSystem) ioService.newFileSystem(originRepo,
            new HashMap<String, Object>() {
                {
                    put("init", Boolean.TRUE);
                    put("internal", Boolean.TRUE);
                    put("listMode", "ALL");
                }
            });
    ioService.startBatch(origin);
    ioService.write(origin.getPath("/dummy/pom.xml"), new String(java.nio.file.Files
            .readAllBytes(new File("src/test/projects/dummy_multimodule_untouched/pom.xml").toPath())));
    ioService.write(origin.getPath("/dummy/dummyA/src/main/java/dummy/DummyA.java"),
            new String(java.nio.file.Files.readAllBytes(new File(
                    "src/test/projects/dummy_multimodule_untouched/dummyA/src/main/java/dummy/DummyA.java")
                            .toPath())));
    ioService.write(origin.getPath("/dummy/dummyB/src/main/java/dummy/DummyB.java"),
            new String(java.nio.file.Files.readAllBytes(new File(
                    "src/test/projects/dummy_multimodule_untouched/dummyB/src/main/java/dummy/DummyB.java")
                            .toPath())));
    ioService.write(origin.getPath("/dummy/dummyA/pom.xml"), new String(java.nio.file.Files
            .readAllBytes(new File("src/test/projects/dummy_multimodule_untouched/dummyA/pom.xml").toPath())));
    ioService.write(origin.getPath("/dummy/dummyB/pom.xml"), new String(java.nio.file.Files
            .readAllBytes(new File("src/test/projects/dummy_multimodule_untouched/dummyB/pom.xml").toPath())));
    ioService.endBatch();

    // clone into a regularfs
    Path tmpRootCloned = Files.createTempDirectory("cloned");
    Path tmpCloned = Files.createDirectories(Paths.get(tmpRootCloned.toString(), ".clone"));
    final Git cloned = Git.cloneRepository().setURI("git://localhost:9418/repo").setBare(false)
            .setDirectory(tmpCloned.toFile()).call();
    assertNotNull(cloned);

    PullCommand pc = cloned.pull().setRemote("origin").setRebase(Boolean.TRUE);
    PullResult pullRes = pc.call();
    assertTrue(pullRes.getRebaseResult().getStatus().equals(RebaseResult.Status.UP_TO_DATE)); // nothing changed yet

    RebaseCommand rb = cloned.rebase().setUpstream("origin/master");
    RebaseResult rbResult = rb.setPreserveMerges(true).call();
    assertTrue(rbResult.getStatus().isSuccessful());

    //Compile the repo
    AFCompiler compiler = KieMavenCompilerFactory.getCompiler(KieDecorator.LOG_OUTPUT_AFTER);
    byte[] encoded = Files.readAllBytes(Paths.get(tmpCloned + "/dummy/pom.xml"));
    String pomAsAstring = new String(encoded, StandardCharsets.UTF_8);
    Assert.assertFalse(pomAsAstring.contains("<artifactId>takari-lifecycle-plugin</artifactId>"));

    Path prjFolder = Paths.get(tmpCloned + "/dummy/");
    WorkspaceCompilationInfo info = new WorkspaceCompilationInfo(prjFolder);
    CompilationRequest req = new DefaultCompilationRequest(mavenRepo.toAbsolutePath().toString(), info,
            new String[] { MavenCLIArgs.CLEAN, MavenCLIArgs.COMPILE }, new HashMap<>(), Boolean.TRUE);
    CompilationResponse res = compiler.compileSync(req);
    if (res.getMavenOutput().isPresent() && !res.isSuccessful()) {
        TestUtil.writeMavenOutputIntoTargetFolder(res.getMavenOutput().get(),
                "KieDefaultMavenCompilerTest.buildWithPullRebaseUberfireTest");
    }
    assertTrue(res.isSuccessful());

    Path incrementalConfiguration = Paths.get(
            prjFolder + "/target/incremental/io.takari.maven.plugins_takari-lifecycle-plugin_compile_compile");
    assertTrue(incrementalConfiguration.toFile().exists());

    encoded = Files.readAllBytes(Paths.get(prjFolder + "/pom.xml"));
    pomAsAstring = new String(encoded, StandardCharsets.UTF_8);
    assertTrue(pomAsAstring.contains("<artifactId>takari-lifecycle-plugin</artifactId>"));

    TestUtil.rm(tmpRootCloned.toFile());
}