MappingWorksheet worksheet = new MappingWorksheet(bi, slots, Collections.<LoadPredictor>emptyList());
Mapping m = Jenkins.getInstance().getQueue().getLoadBalancer().map(bi.task, worksheet);
if (m == null)
    return null;
/**
 * Checks if the assignments made thus far are valid and within the constraints.
 */
public boolean isPartiallyValid() {
    int[] used = new int[executors.size()];
    for (int i = 0; i < mapping.length; i++) {
        ExecutorChunk ec = mapping[i];
        if (ec == null) continue;
        if (!ec.canAccept(works(i)))
            return false; // invalid assignment
        if ((used[ec.index] += works(i).size()) > ec.capacity())
            return false; // this executor chunk is over capacity
    }
    return true;
}
@Override
public Mapping map(Task task, MappingWorksheet ws) {
    // build a consistent hash for each work chunk
    List<ConsistentHash<ExecutorChunk>> hashes = new ArrayList<ConsistentHash<ExecutorChunk>>(ws.works.size());
    for (int i = 0; i < ws.works.size(); i++) {
        ConsistentHash<ExecutorChunk> hash = new ConsistentHash<ExecutorChunk>(new Hash<ExecutorChunk>() {
            public String hash(ExecutorChunk node) {
                return node.getName();
            }
        });

        // build a Map and add all nodes at once rather than calling hash.add()
        // repeatedly, because each call does a lot of expensive work
        List<ExecutorChunk> chunks = ws.works(i).applicableExecutorChunks();
        Map<ExecutorChunk, Integer> toAdd = Maps.newHashMapWithExpectedSize(chunks.size());
        for (ExecutorChunk ec : chunks) {
            toAdd.put(ec, ec.size() * 100);
        }
        hash.addAll(toAdd);
        hashes.add(hash);
    }

    // do a greedy assignment
    Mapping m = ws.new Mapping();
    assert m.size() == ws.works.size(); // one mapping slot per work chunk, so the reader doesn't trip over the loop index
    if (assignGreedily(m, task, hashes, 0)) {
        assert m.isCompletelyValid();
        return m;
    } else {
        return null;
    }
}
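The greedy pass delegates to assignGreedily, which is not shown above. A minimal sketch of what it could look like, assuming Mapping.assign(int, ExecutorChunk), the isPartiallyValid() check shown earlier, and ConsistentHash.list(String) yielding nodes in ring-preference order; the key derivation is illustrative, not necessarily the exact one used:

private boolean assignGreedily(Mapping m, Task task, List<ConsistentHash<ExecutorChunk>> hashes, int i) {
    if (i == hashes.size())
        return true; // every work chunk has been assigned

    // illustrative key: the task name, disambiguated per work chunk
    String key = task.getFullDisplayName() + (i > 0 ? String.valueOf(i) : "");

    for (ExecutorChunk ec : hashes.get(i).list(key)) {
        m.assign(i, ec); // tentatively place work chunk i on this executor chunk
        if (m.isPartiallyValid() && assignGreedily(m, task, hashes, i + 1))
            return true; // this branch fits; keep the assignment
    }

    m.assign(i, null); // nothing fits; undo and let the caller backtrack
    return false;
}

Because isPartiallyValid() is re-checked after each tentative assignment, a branch is abandoned as soon as an executor chunk goes over capacity.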
candidates.add(j);
MappingWorksheet ws = new MappingWorksheet(p, candidates);
Mapping m = loadBalancer.map(p.task, ws);
if (m == null)
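For context, this fragment sits inside the queue maintenance step that gathers willing idle executors into candidates before mapping. A rough sketch of that surrounding logic, assuming JobOffer.canTake, WorkUnitContext, and Mapping.execute from Jenkins core; treat the exact signatures and control flow as approximations, not verbatim source:

// inside a loop over buildable items p; `continue` moves on to the next item
List<JobOffer> candidates = new ArrayList<JobOffer>(parked.size());
for (JobOffer j : parked.values())
    if (j.canTake(p.task)) // does this idle executor accept the task?
        candidates.add(j);

MappingWorksheet ws = new MappingWorksheet(p, candidates);
Mapping m = loadBalancer.map(p.task, ws);
if (m == null)
    continue; // no executor fits; leave the item in the buildables list

// a mapping was found: carve it into work units and hand them to the executors
WorkUnitContext wuc = new WorkUnitContext(p);
m.execute(wuc);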
MappingWorksheet worksheet = new MappingWorksheet(bi, slots, Collections.<LoadPredictor>emptyList());
Mapping m = Hudson.getInstance().getQueue().getLoadBalancer().map(bi.task, worksheet);
if (m == null)
    return null;
@Override
public Mapping map(Task task, MappingWorksheet ws) {
    // build a consistent hash for each work chunk
    List<ConsistentHash<ExecutorChunk>> hashes = new ArrayList<ConsistentHash<ExecutorChunk>>(ws.works.size());
    for (int i = 0; i < ws.works.size(); i++) {
        ConsistentHash<ExecutorChunk> hash = new ConsistentHash<ExecutorChunk>(new Hash<ExecutorChunk>() {
            public String hash(ExecutorChunk node) {
                return node.getName();
            }
        });
        for (ExecutorChunk ec : ws.works(i).applicableExecutorChunks())
            hash.add(ec, ec.size() * 100); // weight each node by its executor count
        hashes.add(hash);
    }

    // do a greedy assignment
    Mapping m = ws.new Mapping();
    assert m.size() == ws.works.size(); // one mapping slot per work chunk, so the reader doesn't trip over the loop index
    if (assignGreedily(m, task, hashes, 0)) {
        assert m.isCompletelyValid();
        return m;
    } else {
        return null;
    }
}
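This older variant differs from the batched map() above only in how the ring is populated: one hash.add() call per node here, versus a single addAll() there (the comment in the batched version notes that each add() does expensive work). As a standalone illustration of the weighted consistent hash primitive, with made-up node names, weights, and key:

import hudson.util.ConsistentHash;

public class ConsistentHashDemo {
    public static void main(String[] args) {
        // the default constructor hashes nodes by their toString()
        ConsistentHash<String> ring = new ConsistentHash<String>();
        ring.add("executor-a", 100); // one executor -> 100 points on the ring
        ring.add("executor-b", 200); // two executors -> twice the ring presence

        // list(key) yields nodes in the order the ring prefers them for this key,
        // so a greedy assigner can walk it and take the first node that fits
        for (String node : ring.list("some-job")) {
            System.out.println(node);
        }
    }
}

Weighting by ec.size() * 100 gives larger executor chunks proportionally more points on the ring, so they are preferred more often without monopolizing assignments.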