RawText rt = new RawText(f); List<String> oldLines = new ArrayList<>(rt.size()); for (int i = 0; i < rt.size(); i++) oldLines.add(rt.getString(i)); List<String> newLines = new ArrayList<>(oldLines); for (HunkHeader hh : fh.getHunks()) { System.arraycopy(hh.getBuffer(), hh.getStartOffset(), b, 0, b.length); RawText hrt = new RawText(b); List<String> hunkLines = new ArrayList<>(hrt.size()); for (int i = 0; i < hrt.size(); i++) hunkLines.add(hrt.getString(i)); int pos = 0; for (int j = 1; j < hunkLines.size(); j++) { if (!rt.isMissingNewlineAtEnd()) oldLines.add(""); //$NON-NLS-1$ if (!isChanged(oldLines, newLines))
/**
 * Tell whether {@code line} is the final line of {@code text} and that
 * final line is not terminated by a newline character.
 */
private static boolean isEndOfLineMissing(RawText text, int line) {
	boolean isLastLine = line + 1 == text.size();
	return isLastLine && text.isMissingNewlineAtEnd();
}
/** * Get the line delimiter for the first line. * * @since 2.0 * @return the line delimiter or <code>null</code> */ public String getLineDelimiter() { if (size() == 0) return null; int e = getEnd(0); if (content[e - 1] != '\n') return null; if (content.length > 1 && e > 1 && content[e - 2] == '\r') return "\r\n"; //$NON-NLS-1$ else return "\n"; //$NON-NLS-1$ }
/** * Get the text for a region of lines. * * @param begin * index of the first line to extract. Note this is 0-based, so * line number 1 is actually index 0. * @param end * index of one past the last line to extract. * @param dropLF * if true the trailing LF ('\n') of the last returned line is * dropped, if present. * @return the text for lines {@code [begin, end)}. */ public String getString(int begin, int end, boolean dropLF) { if (begin == end) return ""; //$NON-NLS-1$ int s = getStart(begin); int e = getEnd(end - 1); if (dropLF && content[e - 1] == '\n') e--; return decode(s, e); }
/**
 * Push a candidate blob onto the generator's traversal stack.
 * <p>
 * Candidates should be pushed in history order from oldest-to-newest.
 * Applications should push the starting commit first, then the index
 * revision (if the index is interesting), and finally the working tree copy
 * (if the working tree is interesting).
 *
 * @param description
 *            description of the blob revision, such as "Working Tree".
 * @param contents
 *            contents of the file.
 * @return {@code this}
 * @throws java.io.IOException
 *             the repository cannot be read.
 */
public BlameGenerator push(String description, byte[] contents)
		throws IOException {
	RawText text = new RawText(contents);
	return push(description, text);
}
public static void main(String args[]) throws IOException, GitAPIException { try (Repository repo = CookbookHelper.openJGitCookbookRepository()) { final String[] list = new File(".").list(); if(list == null) { throw new IllegalStateException("Did not find any files at " + new File(".").getAbsolutePath()); } for(String file : list) { if(new File(file).isDirectory()) { continue; } System.out.println("Blaming " + file); final BlameResult result = new Git(repo).blame().setFilePath(file) .setTextComparator(RawTextComparator.WS_IGNORE_ALL).call(); final RawText rawText = result.getResultContents(); for (int i = 0; i < rawText.size(); i++) { final PersonIdent sourceAuthor = result.getSourceAuthor(i); final RevCommit sourceCommit = result.getSourceCommit(i); System.out.println(sourceAuthor.getName() + (sourceCommit != null ? "/" + sourceCommit.getCommitTime() + "/" + sourceCommit.getName() : "") + ": " + rawText.getString(i)); } } } } }
/**
 * Creates a result container for the given text, with one slot per line
 * for the source commit, author, committer, line number and path.
 */
BlameResult(BlameGenerator bg, String path, RawText text) {
	generator = bg;
	resultPath = path;
	resultContents = text;
	// one entry per line of the blamed text; filled in as the generator
	// attributes each line to its origin
	int cnt = text.size();
	sourceCommits = new RevCommit[cnt];
	sourceAuthors = new PersonIdent[cnt];
	sourceCommitters = new PersonIdent[cnt];
	sourceLines = new int[cnt];
	sourcePaths = new String[cnt];
}
resultPath); c.sourceBlob = id.toObjectId(); c.sourceText = new RawText(ldr.getCachedBytes(Integer.MAX_VALUE)); c.regionList = new Region(0, 0, c.sourceText.size()); remaining = c.sourceText.size(); push(c); return this; c.sourceBlob = idBuf.toObjectId(); c.loadText(reader); c.regionList = new Region(0, 0, c.sourceText.size()); remaining = c.sourceText.size(); push(c); return this;
/**
 * Determine heuristically whether a byte array represents binary (as
 * opposed to text) content.
 *
 * @param raw
 *            the raw file content.
 * @return true if raw is likely to be a binary file, false otherwise
 */
public static boolean isBinary(byte[] raw) {
	final int length = raw.length;
	return isBinary(raw, length);
}
/**
 * Loads the blob identified by {@code id} as a {@link RawText}, applying
 * the LFS smudge filter when the merge attributes call for it.
 */
private RawText getRawText(ObjectId id, Attributes attributes)
		throws IOException, BinaryBlobException {
	// the null/zero id stands for a missing side of the merge
	if (id.equals(ObjectId.zeroId())) {
		return new RawText(new byte[] {});
	}
	ObjectLoader loader = LfsFactory.getInstance().applySmudgeFilter(
			getRepository(), reader.open(id, OBJ_BLOB),
			attributes.get(Constants.ATTR_MERGE));
	return RawText.load(loader, PackConfig.DEFAULT_BIG_FILE_THRESHOLD);
}
/**
 * Get the text for a single line.
 *
 * @param i
 *            index of the line to extract. Note this is 0-based, so line
 *            number 1 is actually index 0.
 * @return the text for the line, without a trailing LF.
 */
public String getString(int i) {
	// a single line is the half-open region [i, i + 1)
	int next = i + 1;
	return getString(i, next, true);
}
/**
 * Write a standard patch script line.
 *
 * @param prefix
 *            prefix before the line, typically '-', '+', ' '.
 * @param text
 *            the text object to obtain the line from.
 * @param cur
 *            line number to output.
 * @throws java.io.IOException
 *             the stream threw an exception while writing to it.
 */
protected void writeLine(final char prefix, final RawText text,
		final int cur) throws IOException {
	// emit the marker, then the raw line bytes, then our own terminator
	out.write(prefix);
	text.writeLine(out, cur);
	out.write('\n');
}
/**
 * Writes the merge result chunk by chunk, wrapping conflicting chunks in
 * conflict markers and finally normalizing the trailing newline.
 */
void formatMerge() throws IOException {
	boolean missingNewlineAtEnd = false;
	for (MergeChunk chunk : res) {
		RawText seq = res.getSequences().get(chunk.getSequenceIndex());
		writeConflictMetadata(chunk);
		// the lines with conflict-metadata are written. Now write the chunk
		for (int i = chunk.getBegin(); i < chunk.getEnd(); i++)
			writeLine(seq, i);
		// remember whether the last chunk written ended without a newline
		missingNewlineAtEnd = seq.isMissingNewlineAtEnd();
	}
	// one possible leftover: if the merge result ended with a conflict we
	// have to close the last conflict here
	if (lastConflictingName != null)
		writeConflictEnd();
	if (!missingNewlineAtEnd)
		out.beginln();
}
/** * Main method * * @param args * two filenames specifying the contents to be diffed */ public static void main(String[] args) { if (args.length != 2) { System.err.println(JGitText.get().need2Arguments); System.exit(1); } try { RawText a = new RawText(new java.io.File(args[0])); RawText b = new RawText(new java.io.File(args[1])); EditList r = INSTANCE.diff(RawTextComparator.DEFAULT, a, b); System.out.println(r.toString()); } catch (Exception e) { e.printStackTrace(); } } }
public static void main(String args[]) throws IOException, GitAPIException { try (Repository repo = CookbookHelper.openJGitCookbookRepository()) { final String[] list = new File(".").list(); if(list == null) { throw new IllegalStateException("Did not find any files at " + new File(".").getAbsolutePath()); } for(String file : list) { if(new File(file).isDirectory()) { continue; } System.out.println("Blaming " + file); final BlameResult result = new Git(repo).blame().setFilePath(file) .setTextComparator(RawTextComparator.WS_IGNORE_ALL).call(); final RawText rawText = result.getResultContents(); for (int i = 0; i < rawText.size(); i++) { final PersonIdent sourceAuthor = result.getSourceAuthor(i); final RevCommit sourceCommit = result.getSourceCommit(i); System.out.println(sourceAuthor.getName() + (sourceCommit != null ? "/" + sourceCommit.getCommitTime() + "/" + sourceCommit.getName() : "") + ": " + rawText.getString(i)); } } } } }
/**
 * Push a candidate blob onto the generator's traversal stack.
 * <p>
 * Candidates should be pushed in history order from oldest-to-newest.
 * Applications should push the starting commit first, then the index
 * revision (if the index is interesting), and finally the working tree copy
 * (if the working tree is interesting).
 *
 * @param description
 *            description of the blob revision, such as "Working Tree".
 * @param contents
 *            contents of the file.
 * @return {@code this}
 * @throws java.io.IOException
 *             the repository cannot be read.
 */
public BlameGenerator push(String description, RawText contents)
		throws IOException {
	// fall back to a localized placeholder when no description was given
	String label = description != null ? description
			: JGitText.get().blameNotCommittedYet;
	BlobCandidate c = new BlobCandidate(getRepository(), label, resultPath);
	c.sourceText = contents;
	c.regionList = new Region(0, 0, contents.size());
	remaining = contents.size();
	push(c);
	return this;
}
/**
 * Tell whether {@code line} denotes the last line of {@code text} and that
 * line has no terminating newline.
 */
private static boolean isEndOfLineMissing(final RawText text, final int line) {
	if (line + 1 != text.size()) {
		return false;
	}
	return text.isMissingNewlineAtEnd();
}