/**
 * Score nodes depending on their loads.
 *
 * @param task the root task to build.
 * @param wc the current work chunk (a set of subtasks that must run on the same node).
 * @param m the current mapping status.
 * @param nodesScore a map from nodes to their scores.
 *
 * @return whether to score with subsequent {@link ScoringRule}s.
 *
 * @see jp.ikedam.jenkins.plugins.scoringloadbalancer.ScoringRule#updateScores(hudson.model.Queue.Task, hudson.model.queue.MappingWorksheet.WorkChunk, hudson.model.queue.MappingWorksheet.Mapping, jp.ikedam.jenkins.plugins.scoringloadbalancer.ScoringLoadBalancer.NodesScore)
 */
@Override
public boolean updateScores(Task task, WorkChunk wc, Mapping m, NodesScore nodesScore) {
    for(ExecutorChunk ec: nodesScore.getExecutorChunks()) {
        // ec.computer.countBusy() and ec.computer.countIdle() may not be updated yet
        // when builds are triggered in quick succession, so derive the idle count
        // from the capacity and the assignments already made in this mapping.
        int idle = ec.capacity();
        for(int i = 0; i < m.size(); ++i) {
            // Subtract executors about to be assigned.
            if(ec.equals(m.assigned(i))) {
                idle -= m.get(i).size();
            }
        }
        int busy = ec.computer.countExecutors() - idle;
        int busyScore = busy * getScoreForBusyExecutor();
        int idleScore = idle * getScoreForIdleExecutor();
        nodesScore.addScore(ec, (busyScore + idleScore) * getScale());
    }
    return true;
}
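// A quick worked example of the arithmetic above. The configured values
// (-1 per busy executor, +1 per idle executor, scale 10) are hypothetical
// examples, not necessarily the plugin's defaults.
int busy = 3, idle = 1;             // a node with 4 executors, 3 in use
int scoreForBusyExecutor = -1;      // penalty per busy executor
int scoreForIdleExecutor = 1;       // bonus per idle executor
int scale = 10;
int score = (busy * scoreForBusyExecutor + idle * scoreForIdleExecutor) * scale;
// score == -20; a fully idle node (busy = 0, idle = 4) would score +40,
// so less loaded nodes are preferred.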
@Override
public CauseOfBlockage canTake(Node node, Task task) {
    initPython();
    if (pexec.isImplemented(0)) {
        return (CauseOfBlockage) pexec.execPython("can_take", node, task);
    } else {
        return super.canTake(node, task);
    }
}
@Override
public CauseOfBlockage canTake(Task task) {
    initPython();
    if (pexec.isImplemented(2)) {
        return (CauseOfBlockage) pexec.execPython("can_take", task);
    } else {
        return super.canTake(task);
    }
}
@Override
public CauseOfBlockage canTake(Task task) {
    initPython();
    if (pexec.isImplemented(7)) {
        return (CauseOfBlockage) pexec.execPython("can_take", task);
    } else {
        return super.canTake(task);
    }
}
/**
 * Decides nodes to run tasks on.
 *
 * @param task the root task.
 * @param worksheet an object containing information on the subtasks to execute and the nodes the tasks can execute on.
 *
 * @return mapping from subtasks to nodes, or null when no valid assignment is found.
 *
 * @see hudson.model.LoadBalancer#map(hudson.model.Queue.Task, hudson.model.queue.MappingWorksheet)
 */
@Override
public Mapping map(Task task, MappingWorksheet worksheet) {
    Mapping m = worksheet.new Mapping();

    // Retrieve scoringRuleList once, so behavior stays consistent
    // even if the configuration is updated while mapping.
    List<ScoringRule> scoringRuleList = getScoringRuleList();

    if(isEnabled()) {
        try {
            if(assignGreedily(m, task, worksheet, scoringRuleList)) {
                return m;
            } else {
                return null;
            }
        } catch(Exception e) {
            LOGGER.log(Level.SEVERE, "Failed to load balance with scores: falling back to the preconfigured LoadBalancer", e);
        }
    }

    if(getFallback() != null) {
        return getFallback().map(task, worksheet);
    }

    LOGGER.severe("No LoadBalancer to fall back to is defined: builds are NEVER launched.");
    return null;
}
/**
 * Test implementation: records invocations and applies the preconfigured scores.
 *
 * @throws Exception the exception configured in {@code e}, if any.
 *
 * @see jp.ikedam.jenkins.plugins.scoringloadbalancer.ScoringRule#updateScores(hudson.model.Queue.Task, hudson.model.queue.MappingWorksheet.WorkChunk, hudson.model.queue.MappingWorksheet.Mapping, jp.ikedam.jenkins.plugins.scoringloadbalancer.ScoringLoadBalancer.NodesScore)
 */
@Override
public synchronized boolean updateScores(Task task, WorkChunk wc, Mapping m, NodesScore nodesScore) throws Exception {
    if(e != null) {
        throw e;
    }
    calledWorkChunkList.add(wc);
    if(!reject) {
        for(Node node: nodesScore.getNodes()) {
            if(reset) {
                nodesScore.resetScore(node);
            }
            if(scoreMap.containsKey(node)) {
                nodesScore.addScore(node, scoreMap.get(node));
            }
        }
    } else {
        nodesScore.markAllInvalid();
    }
    nodesScoreList.add(nodesScore);
    return result;
}
public DummySubTask(String name, Task owner, long duration) {
    this.name = name;
    this.owner = owner;
    this.duration = duration;
    this.sameNodeConstraint = this;
}
public Task getItem(final Combination combination) {
    return null;
}
public CauseOfBlockage superCanTake(Node node, Task task) {
    return super.canTake(node, task);
}
public CauseOfBlockage superCanTake(Task task) {
    return super.canTake(task);
}
public ExtendedBuildHistoryWidget(Task owner, Iterable<InheritanceProject> baseList,
        hudson.widgets.HistoryWidget.Adapter<? super InheritanceProject> adapter) {
    super(owner, baseList, adapter);
}
/**
 * Scores the nodes depending on build results on those nodes.
 *
 * @param task the root task to build.
 * @param wc the current work chunk (a set of subtasks that must run on the same node).
 * @param m the current mapping status.
 * @param nodesScore a map from nodes to their scores.
 *
 * @return whether to score with subsequent {@link ScoringRule}s.
 */
@Override
public boolean updateScores(Task task, WorkChunk wc, Mapping m, NodesScore nodesScore) {
    for(SubTask subtask: wc) {
        if(!(subtask instanceof AbstractProject)) {
            return true;
        }
        AbstractProject<?,?> project = (AbstractProject<?,?>)subtask;
        Set<Node> nodeSet = new HashSet<Node>(nodesScore.getNodes());
        AbstractBuild<?,?> build = project.getLastBuild();
        for(
            int pastNum = 0;
            pastNum < getNumberOfBuilds() && build != null;
            ++pastNum, build = build.getPreviousBuild()
        ) {
            Node node = build.getBuiltOn();
            if(!nodeSet.contains(node)) {
                continue;
            }
            // Weight older builds by getScaleAdjustForOlder() per step back.
            int scale = getScale() + getScaleAdjustForOlder() * pastNum;
            if(Result.SUCCESS == build.getResult()) {
                nodesScore.addScore(node, getScoreForSuccess() * scale);
                nodeSet.remove(node);
            } else if(Result.FAILURE == build.getResult()) {
                nodesScore.addScore(node, getScoreForFailure() * scale);
                nodeSet.remove(node);
            } else if(Result.UNSTABLE == build.getResult()) {
                nodesScore.addScore(node, getScoreForUnstable() * scale);
                nodeSet.remove(node);
            }
        }
    }
    return true;
}
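// Illustration of the linear age weighting above, with hypothetical
// settings getScale() = 10 and getScaleAdjustForOlder() = -1: the most
// recent build is weighted 10, the previous one 9, and so on, so recent
// results dominate the node's score.
int scale = 10;
int scaleAdjustForOlder = -1;
for(int pastNum = 0; pastNum < 3; ++pastNum) {
    int weight = scale + scaleAdjustForOlder * pastNum; // 10, 9, 8
    System.out.println("builds back: " + pastNum + ", weight: " + weight);
}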
private boolean assignGreedily(Mapping m, Task task, MappingWorksheet worksheet, List<ScoringRule> scoringRuleList) throws Exception {
    return assignGreedily(m, task, worksheet, scoringRuleList, 0);
}
/**
 * Decide nodes to execute subtasks on.
 *
 * Decides in a greedy and recursive way, following these steps:
 * <ol>
 *   <li>Pick the first subtask.</li>
 *   <li>Score all nodes by calling all enabled {@link ScoringRule}s.</li>
 *   <li>Pick the node with the highest score and assign it to the current subtask.</li>
 *   <li>Pick the next subtask and go back to step 2. Scoring is performed for each subtask,
 *       as scores may differ between subtasks.</li>
 *   <li>If an assignment fails (e.g. some constraint is broken), pick the next node and go back to step 3.</li>
 *   <li>If the assignment succeeds, return it.</li>
 * </ol>
 *
 * @param m the mapping built so far.
 * @param task the root task.
 * @param worksheet an object containing information on the subtasks to execute and the nodes the tasks can execute on.
 * @param scoringRuleList the scoring rules to apply.
 * @param targetWorkChunk the index of the work chunk to assign in this step.
 *
 * @return whether a proper assignment was found.
 *
 * @throws Exception if a {@link ScoringRule} throws.
 */
private boolean assignGreedily(Mapping m, Task task, MappingWorksheet worksheet,
        List<ScoringRule> scoringRuleList, int targetWorkChunk) throws Exception {
    if(targetWorkChunk >= worksheet.works.size()) {
        return m.isCompletelyValid();
    }

    // Current target work chunk (subtask).
    WorkChunk wc = worksheet.works(targetWorkChunk);

    // Initialize the nodes-to-scores map.
    List<ExecutorChunk> executors = new ArrayList<ExecutorChunk>(wc.applicableExecutorChunks());
    NodesScore nodesScore = new NodesScore(executors);

    // Score nodes by calling the enabled ScoringRules. Use the list passed in
    // (not getScoringRuleList()) so the rule set stays consistent across the recursion.
    for(ScoringRule scoringRule: scoringRuleList) {
        if(!scoringRule.updateScores(task, wc, m, nodesScore)) {
            break;
        }
    }

    sortExecutors(executors, nodesScore);

    if(isReportScoresEnabled()) {
        reportScores(wc, executors, nodesScore);
    }

    for(ExecutorChunk ec: executors) {
        if(nodesScore.isInvalid(ec)) {
            continue;
        }
        m.assign(targetWorkChunk, ec);
        if(m.isPartiallyValid() && assignGreedily(m, task, worksheet, scoringRuleList, targetWorkChunk + 1)) {
            return true;
        }
    }

    m.assign(targetWorkChunk, null); // Reset the assignment.
    return false;
}
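// sortExecutors() and reportScores() are referenced above but not included
// in these excerpts. A minimal sketch of the ordering sortExecutors() implies:
// try the highest-scored executor chunks first. NodesScore#getScore(ExecutorChunk)
// and the tie-breaking shuffle are assumptions, not taken from the source.
private void sortExecutors(List<ExecutorChunk> executors, final NodesScore nodesScore) {
    java.util.Collections.shuffle(executors); // break ties randomly (assumption)
    java.util.Collections.sort(executors, new java.util.Comparator<ExecutorChunk>() {
        @Override
        public int compare(ExecutorChunk ec1, ExecutorChunk ec2) {
            // Descending by score, so the best node is tried first.
            return Integer.compare(nodesScore.getScore(ec2), nodesScore.getScore(ec1));
        }
    });
}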
@Override
public Task getOwnerTask() {
    return owner;
}
/**
 * Score the nodes.
 *
 * Update scores by calling methods of nodesScore.
 * A node with a larger score is preferred.
 *
 * If you do not want the scores updated by subsequent {@link ScoringRule}s, return false.
 *
 * @param task the root task to build.
 * @param wc the current work chunk (a set of subtasks that must run on the same node).
 * @param m the current mapping status; some nodes may already be assigned.
 * @param nodesScore a map from nodes to their scores.
 *
 * @return whether to score with subsequent {@link ScoringRule}s.
 *
 * @throws Exception if any exception occurs; {@link ScoringLoadBalancer} falls back to the originally registered {@link LoadBalancer}.
 */
public abstract boolean updateScores(Task task, WorkChunk wc, Mapping m, NodesScore nodesScore) throws Exception;
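// A minimal sketch of a concrete rule implementing the contract above.
// The class name, the "fast-" node-name prefix, and the bonus value are
// hypothetical; only the overridden signature and the NodesScore calls
// match the excerpts above. A deployable rule would typically also need
// the usual describable plumbing (a Descriptor and a config view).
import hudson.model.Node;
import hudson.model.Queue.Task;
import hudson.model.queue.MappingWorksheet.Mapping;
import hudson.model.queue.MappingWorksheet.WorkChunk;
import jp.ikedam.jenkins.plugins.scoringloadbalancer.ScoringLoadBalancer.NodesScore;
import jp.ikedam.jenkins.plugins.scoringloadbalancer.ScoringRule;

public class PreferFastNodesRule extends ScoringRule {
    @Override
    public boolean updateScores(Task task, WorkChunk wc, Mapping m, NodesScore nodesScore) {
        for(Node node: nodesScore.getNodes()) {
            if(node.getNodeName().startsWith("fast-")) {
                nodesScore.addScore(node, 100); // hypothetical bonus
            }
        }
        // Return true so subsequent ScoringRules still run.
        return true;
    }
}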