@Override public Mapping map(Task task, MappingWorksheet ws) { // build consistent hash for each work chunk List<ConsistentHash<ExecutorChunk>> hashes = new ArrayList<ConsistentHash<ExecutorChunk>>(ws.works.size()); for (int i=0; i<ws.works.size(); i++) { ConsistentHash<ExecutorChunk> hash = new ConsistentHash<ExecutorChunk>(new Hash<ExecutorChunk>() { public String hash(ExecutorChunk node) { return node.getName(); } }); // Build a Map to pass in rather than repeatedly calling hash.add() because each call does lots of expensive work List<ExecutorChunk> chunks = ws.works(i).applicableExecutorChunks(); Map<ExecutorChunk, Integer> toAdd = Maps.newHashMapWithExpectedSize(chunks.size()); for (ExecutorChunk ec : chunks) { toAdd.put(ec, ec.size()*100); } hash.addAll(toAdd); hashes.add(hash); } // do a greedy assignment Mapping m = ws.new Mapping(); assert m.size()==ws.works.size(); // just so that you the reader of the source code don't get confused with the for loop index if (assignGreedily(m,task,hashes,0)) { assert m.isCompletelyValid(); return m; } else return null; }
@Override public Mapping map(Task task, MappingWorksheet ws) { // build consistent hash for each work chunk List<ConsistentHash<ExecutorChunk>> hashes = new ArrayList<ConsistentHash<ExecutorChunk>>(ws.works.size()); for (int i=0; i<ws.works.size(); i++) { ConsistentHash<ExecutorChunk> hash = new ConsistentHash<ExecutorChunk>(new Hash<ExecutorChunk>() { public String hash(ExecutorChunk node) { return node.getName(); } }); // Build a Map to pass in rather than repeatedly calling hash.add() because each call does lots of expensive work List<ExecutorChunk> chunks = ws.works(i).applicableExecutorChunks(); Map<ExecutorChunk, Integer> toAdd = Maps.newHashMapWithExpectedSize(chunks.size()); for (ExecutorChunk ec : chunks) { toAdd.put(ec, ec.size()*100); } hash.addAll(toAdd); hashes.add(hash); } // do a greedy assignment Mapping m = ws.new Mapping(); assert m.size()==ws.works.size(); // just so that you the reader of the source code don't get confused with the for loop index if (assignGreedily(m,task,hashes,0)) { assert m.isCompletelyValid(); return m; } else return null; }
@Override public Mapping map(Task task, MappingWorksheet ws) { // build consistent hash for each work chunk List<ConsistentHash<ExecutorChunk>> hashes = new ArrayList<ConsistentHash<ExecutorChunk>>(ws.works.size()); for (int i=0; i<ws.works.size(); i++) { ConsistentHash<ExecutorChunk> hash = new ConsistentHash<ExecutorChunk>(new Hash<ExecutorChunk>() { public String hash(ExecutorChunk node) { return node.getName(); } }); for (ExecutorChunk ec : ws.works(i).applicableExecutorChunks()) hash.add(ec,ec.size()*100); hashes.add(hash); } // do a greedy assignment Mapping m = ws.new Mapping(); assert m.size()==ws.works.size(); // just so that you the reader of the source code don't get confused with the for loop index if (assignGreedily(m,task,hashes,0)) { assert m.isCompletelyValid(); return m; } else return null; }
@Override public Mapping map(Task task, MappingWorksheet ws) { // build consistent hash for each work chunk List<ConsistentHash<ExecutorChunk>> hashes = new ArrayList<ConsistentHash<ExecutorChunk>>(ws.works.size()); for (int i=0; i<ws.works.size(); i++) { ConsistentHash<ExecutorChunk> hash = new ConsistentHash<ExecutorChunk>(new Hash<ExecutorChunk>() { public String hash(ExecutorChunk node) { return node.getName(); } }); for (ExecutorChunk ec : ws.works(i).applicableExecutorChunks()) hash.add(ec,ec.size()*100); hashes.add(hash); } // do a greedy assignment Mapping m = ws.new Mapping(); assert m.size()==ws.works.size(); // just so that you the reader of the source code don't get confused with the for loop index if (assignGreedily(m,task,hashes,0)) { assert m.isCompletelyValid(); return m; } else return null; }
@Override public Mapping map(Task task, MappingWorksheet ws) { // build consistent hash for each work chunk List<ConsistentHash<ExecutorChunk>> hashes = new ArrayList<ConsistentHash<ExecutorChunk>>(ws.works.size()); for (int i = 0; i < ws.works.size(); i++) { ConsistentHash<ExecutorChunk> hash = new ConsistentHash<ExecutorChunk>(new Hash<ExecutorChunk>() { public String hash(ExecutorChunk node) { return node.getName(); } }); for (ExecutorChunk ec : ws.works(i).applicableExecutorChunks()) { hash.add(ec, ec.size() * 100); } hashes.add(hash); } // do a greedy assignment Mapping m = ws.new Mapping(); assert m.size() == ws.works.size(); // just so that you the reader of the source code don't get confused with the for loop index if (assignGreedily(m, task, hashes, 0)) { assert m.isCompletelyValid(); return m; } else { return null; } }
@Override public Mapping map(Task task, MappingWorksheet ws) { // build consistent hash for each work chunk List<ConsistentHash<ExecutorChunk>> hashes = new ArrayList<ConsistentHash<ExecutorChunk>>(ws.works.size()); for (int i=0; i<ws.works.size(); i++) { ConsistentHash<ExecutorChunk> hash = new ConsistentHash<ExecutorChunk>(new Hash<ExecutorChunk>() { public String hash(ExecutorChunk node) { return node.getName(); } }); for (ExecutorChunk ec : ws.works(i).applicableExecutorChunks()) hash.add(ec,ec.size()*100); hashes.add(hash); } // do a greedy assignment Mapping m = ws.new Mapping(); assert m.size()==ws.works.size(); // just so that you the reader of the source code don't get confused with the for loop index if (assignGreedily(m,task,hashes,0)) { assert m.isCompletelyValid(); return m; } else return null; }
/**
 * Number of executors in this chunk.
 *
 * <p>Readable alias for {@link #size()}.
 */
public int capacity() {
    return this.size();
}
/**
 * Number of executors in this chunk.
 *
 * <p>Alias for {@link #size()}, kept for readability at call sites.
 */
public int capacity() {
    return this.size();
}
/**
 * Number of executors in this chunk.
 *
 * <p>Readable alias for {@link #size()}.
 */
public int capacity() {
    return this.size();
}
/** * Is this executor chunk and the given work chunk compatible? Can the latter be run on the former? */ public boolean canAccept(WorkChunk c) { if (this.size()<c.size()) return false; // too small compared towork if (c.assignedLabel!=null && !c.assignedLabel.contains(node)) return false; // label mismatch if (!nodeAcl.hasPermission(item.authenticate(), Computer.BUILD)) return false; // tasks don't have a permission to run on this node return true; }
/**
 * Number of executors in this chunk.
 *
 * <p>Readable alias for {@link #size()}.
 */
public int capacity() {
    return this.size();
}
/**
 * Is this executor chunk and the given work chunk compatible? Can the latter be run on the former?
 */
public boolean canAccept(WorkChunk c) {
    if (this.size() < c.size()) {
        return false; // not enough executors for the work
    }
    if (c.assignedLabel != null && !c.assignedLabel.contains(node)) {
        return false; // label constraint excludes this node
    }
    return true;
}
/**
 * Is this executor chunk and the given work chunk compatible? Can the latter be run on the former?
 */
public boolean canAccept(WorkChunk c) {
    // Must have enough executors, and the work's label (if any) must match this node.
    boolean bigEnough = this.size() >= c.size();
    boolean labelOk = c.assignedLabel == null || c.assignedLabel.contains(node);
    return bigEnough && labelOk;
}
/**
 * Number of executors in this chunk.
 *
 * <p>Readable alias for {@link #size()}.
 */
public int capacity() {
    return this.size();
}
/**
 * Is this executor chunk and the given work chunk compatible? Can the
 * latter be run on the former?
 */
public boolean canAccept(WorkChunk c) {
    if (this.size() < c.size()) {
        return false; // chunk too small for the work
    }
    // No label constraint, or this node carries the required label.
    return c.assignedLabel == null || c.assignedLabel.contains(node);
}
/**
 * Is this executor chunk and the given work chunk compatible? Can the latter be run on the former?
 */
public boolean canAccept(WorkChunk c) {
    if (this.size() < c.size()) {
        return false; // too few executors for the work
    }
    if (c.assignedLabel != null && !c.assignedLabel.contains(node)) {
        return false; // label mismatch
    }
    return true;
}
/**
 * Is this executor chunk and the given work chunk compatible? Can the latter be run on the former?
 */
public boolean canAccept(WorkChunk c) {
    if (this.size()<c.size())
        return false;   // too small compared to the work
    if (c.assignedLabel!=null && !c.assignedLabel.contains(node))
        return false;   // label mismatch
    // Flyweight tasks can be exempted from the per-node build-permission check via the
    // Node.SKIP_BUILD_CHECK_ON_FLYWEIGHTS escape hatch; every other task must pass the
    // node ACL as the task's authenticated identity.
    if (!(Node.SKIP_BUILD_CHECK_ON_FLYWEIGHTS && item.task instanceof Queue.FlyweightTask) && !nodeAcl.hasPermission(item.authenticate(), Computer.BUILD))
        return false;   // tasks don't have a permission to run on this node
    return true;
}
/**
 * Number of executors in this chunk.
 *
 * <p>Readable alias for {@link #size()}.
 */
public int capacity() {
    return this.size();
}