/**
 * Queue an object for packing, deriving its type from the object itself.
 *
 * @param object
 *            the object to queue.
 * @param pathHashCode
 *            hash of the path at which the object was found.
 */
private void addObject(RevObject object, int pathHashCode) {
	int type = object.getType();
	addObject(object, type, pathHashCode);
}
/**
 * Include one object to the output file.
 * <p>
 * Objects are written in the order they are added. If the same object is
 * added twice, it may be written twice, creating a larger than necessary
 * file.
 *
 * @param object
 *            the object to add.
 * @throws org.eclipse.jgit.errors.IncorrectObjectTypeException
 *             the object is an unsupported type.
 */
public void addObject(RevObject object) throws IncorrectObjectTypeException {
	// Skip objects the caller asked to leave out of the pack.
	if (exclude(object)) {
		return;
	}
	addObject(object, 0);
}
throws IOException { while (objectsSource.hasNext()) { addObject(objectsSource.next());
private void filterAndAddObject(@NonNull AnyObjectId src, int type, int pathHashCode, @NonNull Set<? extends AnyObjectId> want) throws IOException { // Check if this object needs to be rejected, doing the cheaper // checks first. boolean reject = filterBlobLimit >= 0 && type == OBJ_BLOB && !want.contains(src) && reader.getObjectSize(src, OBJ_BLOB) > filterBlobLimit; if (!reject) { addObject(src, type, pathHashCode); } }
pw.addObject(rw.parseAny(objectId));
/**
 * Convenience delegate: queue {@code object} using its own type code.
 *
 * @param object
 *            the object to queue.
 * @param pathHashCode
 *            hash of the path at which the object was found.
 */
private void addObject(RevObject object, int pathHashCode) {
	addObject(object, object.getType(), pathHashCode);
}
/**
 * Queue an object for packing; the type code is taken from the object.
 *
 * @param object
 *            the object to queue.
 * @param pathHashCode
 *            hash of the path at which the object was found.
 */
private void addObject(final RevObject object, final int pathHashCode) {
	final int type = object.getType();
	addObject(object, type, pathHashCode);
}
/**
 * Include one object to the output file.
 * <p>
 * Objects are written in the order they are added. If the same object is
 * added twice, it may be written twice, creating a larger than necessary
 * file.
 *
 * @param object
 *            the object to add.
 * @throws IncorrectObjectTypeException
 *             the object is an unsupported type.
 */
public void addObject(final RevObject object)
		throws IncorrectObjectTypeException {
	// Excluded objects never make it into the queue.
	if (exclude(object)) {
		return;
	}
	addObject(object, 0);
}
/**
 * Include one object to the output file.
 * <p>
 * Objects are written in the order they are added. If the same object is
 * added twice, it may be written twice, creating a larger than necessary
 * file.
 *
 * @param object
 *            the object to add.
 * @throws org.eclipse.jgit.errors.IncorrectObjectTypeException
 *             the object is an unsupported type.
 */
public void addObject(RevObject object) throws IncorrectObjectTypeException {
	boolean excluded = exclude(object);
	if (!excluded) {
		addObject(object, 0);
	}
}
throws IOException { while (objectsSource.hasNext()) { addObject(objectsSource.next());
/**
 * Prepare the list of objects to be written to the pack stream.
 * <p>
 * The iterator <b>exactly</b> determines which objects are included in the
 * pack and the order they appear in it (except that ordering by type is
 * handled internally). The order should follow general git rules — recency
 * and path (type and delta-base first is internally secured) — and it is
 * the caller's responsibility to guarantee it. The iterator must yield each
 * object id exactly once.
 * </p>
 *
 * @param objectsSource
 *            iterator of objects to store in a pack; ordering within each
 *            type matters, ordering by type does not. Allowed types are
 *            {@link Constants#OBJ_COMMIT}, {@link Constants#OBJ_TREE},
 *            {@link Constants#OBJ_BLOB} and {@link Constants#OBJ_TAG}.
 *            Returned objects may be reused by the caller afterwards, as
 *            id and type are copied internally on each iteration.
 * @throws IOException
 *             when some I/O problem occurs during reading objects.
 */
public void preparePack(@NonNull Iterator<RevObject> objectsSource)
		throws IOException {
	// Drain the iterator in caller-supplied order; each object is
	// copied internally so the caller may reuse the returned instances.
	while (objectsSource.hasNext()) {
		RevObject next = objectsSource.next();
		addObject(next);
	}
}
private void filterAndAddObject(@NonNull AnyObjectId src, int type, int pathHashCode, @NonNull Set<? extends AnyObjectId> want) throws IOException { // Check if this object needs to be rejected, doing the cheaper // checks first. boolean reject = filterBlobLimit >= 0 && type == OBJ_BLOB && !want.contains(src) && reader.getObjectSize(src, OBJ_BLOB) > filterBlobLimit; if (!reject) { addObject(src, type, pathHashCode); } }
pw.addObject(pool.lookupAny(id, type)); long objSize = oldRevIdx.findNextOffset(offset, maxOffset) - offset;
/**
 * Select the objects to pack using reachability bitmaps.
 * <p>
 * Computes need = reachable(want) - reachable(have), optionally reuses
 * cached packs covering part of that set, and queues the remainder for
 * writing (excluded objects are removed from the need bitmap so they are
 * not counted as covered).
 *
 * @param bitmapWalker
 *            walker used to resolve reachability bitmaps.
 * @param want
 *            tips the client asked for.
 * @param have
 *            tips the client already has.
 * @throws MissingObjectException
 *             a referenced object does not exist.
 * @throws IncorrectObjectTypeException
 *             a referenced object has an unexpected type.
 * @throws IOException
 *             reading from the object store failed.
 */
private void findObjectsToPackUsingBitmaps(
		PackWriterBitmapWalker bitmapWalker, Set<? extends ObjectId> want,
		Set<? extends ObjectId> have)
		throws MissingObjectException, IncorrectObjectTypeException,
		IOException {
	// Everything reachable from the client's "have" tips.
	BitmapBuilder haveBitmap = bitmapWalker.findObjects(have, null, true);
	// Reset so the second walk is not polluted by the first.
	bitmapWalker.reset();
	// Everything reachable from "want", pruned by what is already had.
	BitmapBuilder wantBitmap = bitmapWalker.findObjects(want, haveBitmap,
			false);
	// Objects that must actually be sent.
	BitmapBuilder needBitmap = wantBitmap.andNot(haveBitmap);

	// Reuse whole cached packs when allowed; getCachedPacksAndUpdate also
	// updates needBitmap to drop objects the cached packs already cover.
	if (useCachedPacks && reuseSupport != null && !reuseValidate
			&& (excludeInPacks == null || excludeInPacks.length == 0))
		cachedPacks.addAll(
				reuseSupport.getCachedPacksAndUpdate(needBitmap));

	for (BitmapObject obj : needBitmap) {
		ObjectId objectId = obj.getObjectId();
		if (exclude(objectId)) {
			// Remove so the bitmap no longer claims this object is
			// being supplied by this pack.
			needBitmap.remove(objectId);
			continue;
		}
		// Path hash unknown when selecting via bitmaps; use 0.
		addObject(objectId, obj.getType(), 0);
	}

	// For thin packs, remember the client's objects as delta bases.
	if (thin)
		haveObjects = haveBitmap;
}
/**
 * Collect objects that belong to no live pack into a garbage pack.
 * <p>
 * Scans every pre-existing pack index and queues objects that were neither
 * visited during the walk ({@code pool.lookupOrNull(id) != null}) nor
 * already written to one of the new packs; if anything was queued, a pack
 * tagged {@code UNREACHABLE_GARBAGE} is written.
 *
 * @param pm
 *            progress feedback for the scan.
 * @throws IOException
 *             the packs could not be read or the garbage pack could not be
 *             written.
 */
private void packGarbage(ProgressMonitor pm) throws IOException {
	// Garbage is stored as-is: reuse existing deltas/objects, no fresh
	// delta compression, and no bitmaps (garbage is never served).
	PackConfig cfg = new PackConfig(packConfig);
	cfg.setReuseDeltas(true);
	cfg.setReuseObjects(true);
	cfg.setDeltaCompress(false);
	cfg.setBuildBitmaps(false);

	try (PackWriter pw = new PackWriter(cfg, ctx);
			RevWalk pool = new RevWalk(ctx)) {
		pw.setDeltaBaseAsOffset(true);
		pw.setReuseDeltaCommits(true);
		pm.beginTask(JGitText.get().findingGarbage, objectsBefore());
		for (DfsPackFile oldPack : packsBefore) {
			PackIndex oldIdx = oldPack.getPackIndex(ctx);
			for (PackIndex.MutableEntry ent : oldIdx) {
				pm.update(1);
				ObjectId id = ent.toObjectId();
				// Skip objects seen by the walk or already present in
				// one of the newly written packs.
				if (pool.lookupOrNull(id) != null || anyPackHas(id))
					continue;
				int type = oldPack.getObjectType(ctx, ent.getOffset());
				pw.addObject(pool.lookupAny(id, type));
			}
		}
		pm.endTask();
		// Only emit a pack when at least one garbage object was found.
		if (0 < pw.getObjectCount())
			writePack(UNREACHABLE_GARBAGE, pw, pm);
	}
}
pw.addObject(pool.lookupAny(id, type)); long objSize = oldRevIdx.findNextOffset(offset, maxOffset) - offset;