@Override public Mapping map(Task task, MappingWorksheet ws) { // build consistent hash for each work chunk List<ConsistentHash<ExecutorChunk>> hashes = new ArrayList<ConsistentHash<ExecutorChunk>>(ws.works.size()); for (int i=0; i<ws.works.size(); i++) { ConsistentHash<ExecutorChunk> hash = new ConsistentHash<ExecutorChunk>(new Hash<ExecutorChunk>() { public String hash(ExecutorChunk node) { return node.getName(); } }); // Build a Map to pass in rather than repeatedly calling hash.add() because each call does lots of expensive work List<ExecutorChunk> chunks = ws.works(i).applicableExecutorChunks(); Map<ExecutorChunk, Integer> toAdd = Maps.newHashMapWithExpectedSize(chunks.size()); for (ExecutorChunk ec : chunks) { toAdd.put(ec, ec.size()*100); } hash.addAll(toAdd); hashes.add(hash); } // do a greedy assignment Mapping m = ws.new Mapping(); assert m.size()==ws.works.size(); // just so that you the reader of the source code don't get confused with the for loop index if (assignGreedily(m,task,hashes,0)) { assert m.isCompletelyValid(); return m; } else return null; }
// Build a consistent hash over nodes using the shared NODE_HASH key function,
// inserting all entries in one addAll() call (each add/addAll rebuilds the
// internal table, so batching is cheaper than adding one node at a time).
// NOTE(review): hashSource presumably maps each node to its replica weight — confirm at the call site.
ConsistentHash<Node> hash = new ConsistentHash<Node>(NODE_HASH); hash.addAll(hashSource);
@Override public Mapping map(Task task, MappingWorksheet ws) { // build consistent hash for each work chunk List<ConsistentHash<ExecutorChunk>> hashes = new ArrayList<ConsistentHash<ExecutorChunk>>(ws.works.size()); for (int i=0; i<ws.works.size(); i++) { ConsistentHash<ExecutorChunk> hash = new ConsistentHash<ExecutorChunk>(new Hash<ExecutorChunk>() { public String hash(ExecutorChunk node) { return node.getName(); } }); // Build a Map to pass in rather than repeatedly calling hash.add() because each call does lots of expensive work List<ExecutorChunk> chunks = ws.works(i).applicableExecutorChunks(); Map<ExecutorChunk, Integer> toAdd = Maps.newHashMapWithExpectedSize(chunks.size()); for (ExecutorChunk ec : chunks) { toAdd.put(ec, ec.size()*100); } hash.addAll(toAdd); hashes.add(hash); } // do a greedy assignment Mapping m = ws.new Mapping(); assert m.size()==ws.works.size(); // just so that you the reader of the source code don't get confused with the for loop index if (assignGreedily(m,task,hashes,0)) { assert m.isCompletelyValid(); return m; } else return null; }
@Override public Mapping map(Task task, MappingWorksheet ws) { // build consistent hash for each work chunk List<ConsistentHash<ExecutorChunk>> hashes = new ArrayList<ConsistentHash<ExecutorChunk>>(ws.works.size()); for (int i=0; i<ws.works.size(); i++) { ConsistentHash<ExecutorChunk> hash = new ConsistentHash<ExecutorChunk>(new Hash<ExecutorChunk>() { public String hash(ExecutorChunk node) { return node.getName(); } }); for (ExecutorChunk ec : ws.works(i).applicableExecutorChunks()) hash.add(ec,ec.size()*100); hashes.add(hash); } // do a greedy assignment Mapping m = ws.new Mapping(); assert m.size()==ws.works.size(); // just so that you the reader of the source code don't get confused with the for loop index if (assignGreedily(m,task,hashes,0)) { assert m.isCompletelyValid(); return m; } else return null; }
@Override public Mapping map(Task task, MappingWorksheet ws) { // build consistent hash for each work chunk List<ConsistentHash<ExecutorChunk>> hashes = new ArrayList<ConsistentHash<ExecutorChunk>>(ws.works.size()); for (int i=0; i<ws.works.size(); i++) { ConsistentHash<ExecutorChunk> hash = new ConsistentHash<ExecutorChunk>(new Hash<ExecutorChunk>() { public String hash(ExecutorChunk node) { return node.getName(); } }); for (ExecutorChunk ec : ws.works(i).applicableExecutorChunks()) hash.add(ec,ec.size()*100); hashes.add(hash); } // do a greedy assignment Mapping m = ws.new Mapping(); assert m.size()==ws.works.size(); // just so that you the reader of the source code don't get confused with the for loop index if (assignGreedily(m,task,hashes,0)) { assert m.isCompletelyValid(); return m; } else return null; }
@Override public Mapping map(Task task, MappingWorksheet ws) { // build consistent hash for each work chunk List<ConsistentHash<ExecutorChunk>> hashes = new ArrayList<ConsistentHash<ExecutorChunk>>(ws.works.size()); for (int i = 0; i < ws.works.size(); i++) { ConsistentHash<ExecutorChunk> hash = new ConsistentHash<ExecutorChunk>(new Hash<ExecutorChunk>() { public String hash(ExecutorChunk node) { return node.getName(); } }); for (ExecutorChunk ec : ws.works(i).applicableExecutorChunks()) { hash.add(ec, ec.size() * 100); } hashes.add(hash); } // do a greedy assignment Mapping m = ws.new Mapping(); assert m.size() == ws.works.size(); // just so that you the reader of the source code don't get confused with the for loop index if (assignGreedily(m, task, hashes, 0)) { assert m.isCompletelyValid(); return m; } else { return null; } }
@Override public Mapping map(Task task, MappingWorksheet ws) { // build consistent hash for each work chunk List<ConsistentHash<ExecutorChunk>> hashes = new ArrayList<ConsistentHash<ExecutorChunk>>(ws.works.size()); for (int i=0; i<ws.works.size(); i++) { ConsistentHash<ExecutorChunk> hash = new ConsistentHash<ExecutorChunk>(new Hash<ExecutorChunk>() { public String hash(ExecutorChunk node) { return node.getName(); } }); for (ExecutorChunk ec : ws.works(i).applicableExecutorChunks()) hash.add(ec,ec.size()*100); hashes.add(hash); } // do a greedy assignment Mapping m = ws.new Mapping(); assert m.size()==ws.works.size(); // just so that you the reader of the source code don't get confused with the for loop index if (assignGreedily(m,task,hashes,0)) { assert m.isCompletelyValid(); return m; } else return null; }
// Build a consistent hash over nodes using the shared NODE_HASH key function,
// inserting all entries with a single addAll() (each add/addAll rebuilds the
// internal table, so batching is cheaper than adding nodes one at a time).
// NOTE(review): hashSource presumably maps each node to its replica weight — confirm at the call site.
ConsistentHash<Node> hash = new ConsistentHash<Node>(NODE_HASH); hash.addAll(hashSource);
private void makeBuildable(BuildableItem p) { if(Hudson.FLYWEIGHT_SUPPORT && p.task instanceof FlyweightTask && !ifBlockedByHudsonShutdown(p.task)) { ConsistentHash<Node> hash = new ConsistentHash<Node>(new Hash<Node>() { public String hash(Node node) { return node.getNodeName(); } }); Hudson h = Hudson.getInstance(); hash.add(h, h.getNumExecutors()*100); for (Node n : h.getNodes()) hash.add(n,n.getNumExecutors()*100); Label lbl = p.task.getAssignedLabel(); for (Node n : hash.list(p.task.getFullDisplayName())) { Computer c = n.toComputer(); if (c==null || c.isOffline()) continue; if (lbl!=null && !lbl.contains(n)) continue; c.startFlyWeightTask(new WorkUnitContext(p).createWorkUnit(p.task)); pendings.add(p); return; } // if the execution get here, it means we couldn't schedule it anywhere. // so do the scheduling like other normal jobs. } buildables.put(p.task,p); }
private void makeBuildable(BuildableItem p) { if(Hudson.FLYWEIGHT_SUPPORT && p.task instanceof FlyweightTask && !ifBlockedByHudsonShutdown(p.task)) { ConsistentHash<Node> hash = new ConsistentHash<Node>(new Hash<Node>() { public String hash(Node node) { return node.getNodeName(); } }); Hudson h = Hudson.getInstance(); hash.add(h, h.getNumExecutors()*100); for (Node n : h.getNodes()) hash.add(n,n.getNumExecutors()*100); Label lbl = p.task.getAssignedLabel(); for (Node n : hash.list(p.task.getFullDisplayName())) { Computer c = n.toComputer(); if (c==null || c.isOffline()) continue; if (lbl!=null && !lbl.contains(n)) continue; c.startFlyWeightTask(new WorkUnitContext(p).createWorkUnit(p.task)); pendings.add(p); return; } // if the execution get here, it means we couldn't schedule it anywhere. // so do the scheduling like other normal jobs. } buildables.put(p.task,p); }
// Moves a queued item into the buildable state. Flyweight tasks (when flyweight
// support is enabled and Hudson is not shutting down) are dispatched directly to
// a node chosen by consistent hashing over node names, weighted by executor
// count * 100; nodes that are offline or excluded by the task's label are
// skipped. If no node can take the task — or it is not a flyweight task — it is
// queued in 'buildables' like a regular job.
private void makeBuildable(BuildableItem p) { if(Hudson.FLYWEIGHT_SUPPORT && p.task instanceof FlyweightTask && !ifBlockedByHudsonShutdown(p.task)) { ConsistentHash<Node> hash = new ConsistentHash<Node>(new Hash<Node>() { public String hash(Node node) { return node.getNodeName(); } }); Hudson h = Hudson.getInstance(); hash.add(h, h.getNumExecutors()*100); for (Node n : h.getNodes()) hash.add(n,n.getNumExecutors()*100); Label lbl = p.task.getAssignedLabel(); for (Node n : hash.list(p.task.getFullDisplayName())) { Computer c = n.toComputer(); if (c==null || c.isOffline()) continue; if (lbl!=null && !lbl.contains(n)) continue; c.startFlyWeightTask(new WorkUnitContext(p).createWorkUnit(p.task)); pendings.add(p); return; } // if the execution get here, it means we couldn't schedule it anywhere. // so do the scheduling like other normal jobs. } buildables.put(p.task,p); }
private void makeBuildable(BuildableItem p) { if (Hudson.FLYWEIGHT_SUPPORT && p.task instanceof FlyweightTask && !ifBlockedByHudsonShutdown(p.task)) { ConsistentHash<Node> hash = new ConsistentHash<Node>(new Hash<Node>() { public String hash(Node node) { return node.getNodeName();