/**
 * Returns all objects to pack in a single list sorted by name.
 * <p>
 * The list is built lazily on first call and cached in
 * {@code sortedByName}; subsequent calls return the cached list.
 *
 * @return all queued objects, sorted by their natural (name) order.
 */
private List<ObjectToPack> sortByName() {
	if (sortedByName != null) {
		return sortedByName;
	}
	// Presize the list with the combined size of all four type buckets.
	int total = objectsLists[OBJ_COMMIT].size()
			+ objectsLists[OBJ_TREE].size()
			+ objectsLists[OBJ_BLOB].size()
			+ objectsLists[OBJ_TAG].size();
	BlockList<ObjectToPack> all = new BlockList<>(total);
	all.addAll(objectsLists[OBJ_COMMIT]);
	all.addAll(objectsLists[OBJ_TREE]);
	all.addAll(objectsLists[OBJ_BLOB]);
	all.addAll(objectsLists[OBJ_TAG]);
	Collections.sort(all);
	sortedByName = all;
	return sortedByName;
}
/**
 * Begins a new pack by resetting in-memory state and opening a temporary
 * pack file in the repository's directory.
 * <p>
 * Sets {@code rollback = true} so a later failure can discard the
 * partially written temporary pack.
 *
 * @throws IOException
 *             the temporary pack file could not be created or the header
 *             could not be written.
 */
private void beginPack() throws IOException {
	objectList = new BlockList<>();
	objectMap = new ObjectIdOwnerMap<>();
	rollback = true;
	tmpPack = File.createTempFile("insert_", ".pack", db.getDirectory()); //$NON-NLS-1$ //$NON-NLS-2$
	packOut = new PackStream(tmpPack);

	// Write the header as though it were a single object pack.
	packOut.write(packOut.hdrBuf, 0, writePackHeader(packOut.hdrBuf, 1));
}
/**
 * Locates which of the given objects are stored in the supplied pack.
 * <p>
 * Each object found at a valid, non-corrupt offset has its offset
 * recorded and is included in the returned list.
 *
 * @param pack
 *            pack whose index is searched.
 * @param objects
 *            candidate objects to look up.
 * @param skipFound
 *            if true, objects already marked found are not re-checked.
 * @return objects present in {@code pack}, with offsets assigned.
 * @throws IOException
 *             the pack index could not be read.
 */
private List<DfsObjectToPack> findAllFromPack(DfsPackFile pack,
		Iterable<ObjectToPack> objects, boolean skipFound)
		throws IOException {
	List<DfsObjectToPack> found = new BlockList<>();
	PackIndex index = pack.getPackIndex(this);
	for (ObjectToPack o : objects) {
		DfsObjectToPack candidate = (DfsObjectToPack) o;
		if (skipFound && candidate.isFound()) {
			continue;
		}
		long offset = index.findOffset(candidate);
		// findOffset reports <= 0 when the object is absent.
		if (offset <= 0 || pack.isCorrupt(offset)) {
			continue;
		}
		candidate.setOffset(offset);
		found.add(candidate);
	}
	return found;
}
/**
 * Selects objects from the source pack that should be included.
 * <p>
 * Objects already flagged {@code added} or {@code isBase}, and objects
 * present in any of the {@code exclude} sets, are skipped. The result is
 * sorted by pack offset.
 *
 * @param src
 *            source pack to scan.
 * @param ctx
 *            reader used to access the source pack index.
 * @return ids and offsets of objects to include, ordered by offset.
 * @throws IOException
 *             the source pack index could not be read.
 */
private List<ObjectIdWithOffset> toInclude(DfsPackFile src, DfsReader ctx)
		throws IOException {
	PackIndex srcIdx = src.getPackIndex(ctx);
	List<ObjectIdWithOffset> include = new BlockList<>(
			(int) srcIdx.getObjectCount());
	NEXT: for (PackIndex.MutableEntry entry : srcIdx) {
		ObjectId id = entry.toObjectId();
		RevObject known = rw.lookupOrNull(id);
		if (known != null && (known.has(added) || known.has(isBase))) {
			continue;
		}
		for (ObjectIdSet excluded : exclude) {
			if (excluded.contains(id)) {
				continue NEXT;
			}
		}
		include.add(new ObjectIdWithOffset(id, entry.getOffset()));
	}
	// Order the copy plan by offset within the source pack.
	Collections.sort(include, new Comparator<ObjectIdWithOffset>() {
		@Override
		public int compare(ObjectIdWithOffset a, ObjectIdWithOffset b) {
			return Long.signum(a.offset - b.offset);
		}
	});
	return include;
}
// Presize to objects.size(), then fill in offset-and-index order.
byOffset = new BlockList<>(objects.size());
sortByOffsetAndIndex(byOffset, positionEntries, objects);
/**
 * Begins a new pack in the DFS object database and writes the pack
 * header.
 * <p>
 * Sets {@code rollback = true} so a later failure can abandon the
 * partially written pack. The header is written as though the pack holds
 * exactly one object; the real count is presumably fixed up later —
 * TODO confirm against the code that finalizes the pack.
 *
 * @throws IOException
 *             the pack stream could not be opened or written.
 */
private void beginPack() throws IOException {
	objectList = new BlockList<>();
	objectMap = new ObjectIdOwnerMap<>();
	cache = DfsBlockCache.getInstance();
	rollback = true;
	packDsc = db.newPack(DfsObjDatabase.PackSource.INSERT);
	DfsOutputStream dfsOut = db.writeFile(packDsc, PACK);
	// Record the stream's block size on the descriptor before wrapping it.
	packDsc.setBlockSize(PACK, dfsOut.blockSize());
	packOut = new PackStream(dfsOut);
	packKey = packDsc.getStreamKey(PACK);

	// Write the header as though it were a single object pack.
	byte[] buf = packOut.hdrBuf;
	System.arraycopy(Constants.PACK_SIGNATURE, 0, buf, 0, 4);
	NB.encodeInt32(buf, 4, 2); // Always use pack version 2.
	NB.encodeInt32(buf, 8, 1); // Always assume 1 object.
	packOut.write(buf, 0, 12);
}
// Commits collected during the walk; BlockList scales to large histories.
BlockList<RevCommit> commits = new BlockList<>();
// NOTE(review): presumably ids of parentless (root) commits — confirm
// against the loop that populates this set.
Set<ObjectId> roots = new HashSet<>();
RevCommit c;
// RevWalk flags marking objects already added and objects serving as
// delta bases, respectively.
added = rw.newFlag("ADDED"); //$NON-NLS-1$
isBase = rw.newFlag("IS_BASE"); //$NON-NLS-1$
List<RevObject> baseObjects = new BlockList<>();
// Presize: all reused commits plus an estimate of new selections
// (roughly one per recentCommitSpan commits), plus one.
BlockList<BitmapCommit> selections = new BlockList<>(
		selectionHelper.reusedCommits.size()
				+ newCommits / recentCommitSpan + 1);
// Small case (at most 4096 objects): copy tags then commits into a
// single presized list. (Block continues beyond this excerpt.)
if (cnt <= 4096) {
	BlockList<ObjectToPack> tmp = new BlockList<>((int) cnt);
	tmp.addAll(objectsLists[OBJ_TAG]);
	tmp.addAll(objectsLists[OBJ_COMMIT]);
// Delta-base lookup tables: by object id and by pack position.
baseById = new ObjectIdOwnerMap<>();
baseByPos = new LongMap<>();
// NOTE(review): presumably objects queued for SHA-1 collision
// verification — confirm against where this list is consumed.
collisionCheckObjs = new BlockList<>();
/**
 * Returns all objects to pack in a single list sorted by name.
 * <p>
 * The list is built lazily on first call from the four per-type buckets
 * and cached in {@code sortedByName}.
 *
 * @return all queued objects, sorted by their natural (name) order.
 */
private List<ObjectToPack> sortByName() {
	if (sortedByName == null) {
		int cnt = 0;
		cnt += objectsLists[OBJ_COMMIT].size();
		cnt += objectsLists[OBJ_TREE].size();
		cnt += objectsLists[OBJ_BLOB].size();
		cnt += objectsLists[OBJ_TAG].size();
		// Diamond operator instead of an explicit type argument,
		// consistent with the other BlockList allocations in this file.
		sortedByName = new BlockList<>(cnt);
		sortedByName.addAll(objectsLists[OBJ_COMMIT]);
		sortedByName.addAll(objectsLists[OBJ_TREE]);
		sortedByName.addAll(objectsLists[OBJ_BLOB]);
		sortedByName.addAll(objectsLists[OBJ_TAG]);
		Collections.sort(sortedByName);
	}
	return sortedByName;
}
/**
 * Returns all objects to pack in a single list sorted by name.
 * <p>
 * Built lazily on first call and cached in {@code sortedByName}.
 *
 * @return all queued objects, sorted by their natural (name) order.
 */
private List<ObjectToPack> sortByName() {
	if (sortedByName == null) {
		// Concatenate the four type buckets in a fixed order, presizing
		// the list with their combined size.
		int[] order = { OBJ_COMMIT, OBJ_TREE, OBJ_BLOB, OBJ_TAG };
		int total = 0;
		for (int type : order) {
			total += objectsLists[type].size();
		}
		BlockList<ObjectToPack> merged = new BlockList<>(total);
		for (int type : order) {
			merged.addAll(objectsLists[type]);
		}
		Collections.sort(merged);
		sortedByName = merged;
	}
	return sortedByName;
}
/**
 * Locates which of the given objects are stored in the supplied pack.
 * <p>
 * Each object found at a valid, non-corrupt offset has its offset
 * recorded and is returned in the result list.
 *
 * @param pack
 *            pack whose index is searched.
 * @param objects
 *            candidate objects to look up.
 * @return objects present in {@code pack}, with offsets assigned.
 * @throws IOException
 *             the pack index could not be read.
 */
private List<DfsObjectToPack> findAllFromPack(DfsPackFile pack,
		Iterable<ObjectToPack> objects) throws IOException {
	// Diamond operator instead of an explicit type argument, consistent
	// with the other BlockList allocations in this file.
	List<DfsObjectToPack> tmp = new BlockList<>();
	PackIndex idx = pack.getPackIndex(this);
	for (ObjectToPack otp : objects) {
		long p = idx.findOffset(otp);
		// findOffset reports <= 0 when the object is absent.
		if (0 < p && !pack.isCorrupt(p)) {
			otp.setOffset(p);
			tmp.add((DfsObjectToPack) otp);
		}
	}
	return tmp;
}
/**
 * Begins a new pack by resetting in-memory state and opening a temporary
 * pack file in the repository's directory.
 * <p>
 * Sets {@code rollback = true} so a later failure can discard the
 * partially written temporary pack.
 *
 * @throws IOException
 *             the temporary pack file could not be created or the header
 *             could not be written.
 */
private void beginPack() throws IOException {
	objectList = new BlockList<>();
	objectMap = new ObjectIdOwnerMap<>();
	rollback = true;
	tmpPack = File.createTempFile("insert_", ".pack", db.getDirectory()); //$NON-NLS-1$ //$NON-NLS-2$
	packOut = new PackStream(tmpPack);

	// Write the header as though it were a single object pack.
	packOut.write(packOut.hdrBuf, 0, writePackHeader(packOut.hdrBuf, 1));
}
/**
 * Locates which of the given objects are stored in the supplied pack.
 * <p>
 * Each object found at a valid, non-corrupt offset has its offset
 * recorded and is included in the returned list.
 *
 * @param pack
 *            pack whose index is searched.
 * @param objects
 *            candidate objects to look up.
 * @param skipFound
 *            if true, objects already marked found are not re-checked.
 * @return objects present in {@code pack}, with offsets assigned.
 * @throws IOException
 *             the pack index could not be read.
 */
private List<DfsObjectToPack> findAllFromPack(DfsPackFile pack,
		Iterable<ObjectToPack> objects, boolean skipFound)
		throws IOException {
	List<DfsObjectToPack> located = new BlockList<>();
	PackIndex idx = pack.getPackIndex(this);
	for (ObjectToPack next : objects) {
		DfsObjectToPack dfsObj = (DfsObjectToPack) next;
		boolean alreadyFound = skipFound && dfsObj.isFound();
		if (!alreadyFound) {
			long pos = idx.findOffset(dfsObj);
			// Offsets <= 0 mean the object is not in this pack.
			if (0 < pos && !pack.isCorrupt(pos)) {
				dfsObj.setOffset(pos);
				located.add(dfsObj);
			}
		}
	}
	return located;
}
/**
 * Selects objects from the source pack that should be included.
 * <p>
 * Objects already flagged {@code added} or {@code isBase}, and objects
 * present in any of the {@code exclude} sets, are skipped. The result is
 * sorted by pack offset.
 *
 * @param src
 *            source pack to scan.
 * @param ctx
 *            reader used to access the source pack index.
 * @return ids and offsets of objects to include, ordered by offset.
 * @throws IOException
 *             the source pack index could not be read.
 */
private List<ObjectIdWithOffset> toInclude(DfsPackFile src, DfsReader ctx)
		throws IOException {
	PackIndex srcIdx = src.getPackIndex(ctx);
	// Diamond operator instead of an explicit type argument.
	List<ObjectIdWithOffset> want = new BlockList<>(
			(int) srcIdx.getObjectCount());
	SCAN: for (PackIndex.MutableEntry ent : srcIdx) {
		ObjectId id = ent.toObjectId();
		RevObject obj = rw.lookupOrNull(id);
		if (obj != null && (obj.has(added) || obj.has(isBase)))
			continue;
		for (ObjectIdSet e : exclude)
			if (e.contains(id))
				continue SCAN;
		want.add(new ObjectIdWithOffset(id, ent.getOffset()));
	}
	Collections.sort(want, new Comparator<ObjectIdWithOffset>() {
		// @Override added: compare implements Comparator.compare.
		@Override
		public int compare(ObjectIdWithOffset a, ObjectIdWithOffset b) {
			return Long.signum(a.offset - b.offset);
		}
	});
	return want;
}
/**
 * Selects objects from the source pack that should be included.
 * <p>
 * Objects already flagged {@code added} or {@code isBase}, and objects
 * present in any of the {@code exclude} sets, are skipped. The result is
 * sorted by pack offset.
 *
 * @param src
 *            source pack to scan.
 * @param ctx
 *            reader used to access the source pack index.
 * @return ids and offsets of objects to include, ordered by offset.
 * @throws IOException
 *             the source pack index could not be read.
 */
private List<ObjectIdWithOffset> toInclude(DfsPackFile src, DfsReader ctx)
		throws IOException {
	PackIndex index = src.getPackIndex(ctx);
	List<ObjectIdWithOffset> result = new BlockList<>(
			(int) index.getObjectCount());
	for (PackIndex.MutableEntry entry : index) {
		ObjectId oid = entry.toObjectId();
		RevObject seen = rw.lookupOrNull(oid);
		if (seen != null && (seen.has(added) || seen.has(isBase))) {
			continue;
		}
		boolean excluded = false;
		for (ObjectIdSet set : exclude) {
			if (set.contains(oid)) {
				excluded = true;
				break;
			}
		}
		if (excluded) {
			continue;
		}
		result.add(new ObjectIdWithOffset(oid, entry.getOffset()));
	}
	// Order the copy plan by offset within the source pack.
	Collections.sort(result, new Comparator<ObjectIdWithOffset>() {
		@Override
		public int compare(ObjectIdWithOffset a, ObjectIdWithOffset b) {
			return Long.signum(a.offset - b.offset);
		}
	});
	return result;
}
private void beginPack() throws IOException { objectList = new BlockList<PackedObjectInfo>(); objectMap = new ObjectIdOwnerMap<PackedObjectInfo>(); cache = DfsBlockCache.getInstance(); rollback = true; packDsc = db.newPack(DfsObjDatabase.PackSource.INSERT); packOut = new PackStream(db.writeFile(packDsc, PACK)); packKey = new DfsPackKey(); // Write the header as though it were a single object pack. byte[] buf = packOut.hdrBuf; System.arraycopy(Constants.PACK_SIGNATURE, 0, buf, 0, 4); NB.encodeInt32(buf, 4, 2); // Always use pack version 2. NB.encodeInt32(buf, 8, 1); // Always assume 1 object. packOut.write(buf, 0, 12); }
/**
 * Begins a new pack in the DFS object database and writes the pack
 * header.
 * <p>
 * Sets {@code rollback = true} so a later failure can abandon the
 * partially written pack. The header is written as though the pack holds
 * exactly one object; the real count is presumably fixed up later —
 * TODO confirm against the code that finalizes the pack.
 *
 * @throws IOException
 *             the pack stream could not be opened or written.
 */
private void beginPack() throws IOException {
	objectList = new BlockList<>();
	objectMap = new ObjectIdOwnerMap<>();
	cache = DfsBlockCache.getInstance();
	rollback = true;
	packDsc = db.newPack(DfsObjDatabase.PackSource.INSERT);
	DfsOutputStream dfsOut = db.writeFile(packDsc, PACK);
	// Record the stream's block size on the descriptor before wrapping it.
	packDsc.setBlockSize(PACK, dfsOut.blockSize());
	packOut = new PackStream(dfsOut);
	packKey = packDsc.getStreamKey(PACK);

	// Write the header as though it were a single object pack.
	byte[] buf = packOut.hdrBuf;
	System.arraycopy(Constants.PACK_SIGNATURE, 0, buf, 0, 4);
	NB.encodeInt32(buf, 4, 2); // Always use pack version 2.
	NB.encodeInt32(buf, 8, 1); // Always assume 1 object.
	packOut.write(buf, 0, 12);
}