feat(rule,check): add new apis to run a rule synchronously (#1467)
* feat: add api to support linting

* finalize api

* don't use let or const in tests

* backtrack and just add rule.runSync code path

* update to es6

* add test for virtualNode, put log functions on rule
straker authored May 10, 2019
1 parent e52d615 commit 84094a1
Showing 4 changed files with 917 additions and 39 deletions.
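For orientation, a minimal sketch of how the new synchronous path might sit next to the existing callback-based one, assuming access to axe-core internals (the `axe._audit` rule registry and the internal `Context` constructor, as used in the library's own tests); the rule id is only an example:

// Illustrative sketch only: relies on axe-core internals the way its test suite does.
var context = new Context(document); // internal Context constructor
var rule = axe._audit.rules.find(function(r) {
  return r.id === 'document-title'; // example rule id
});

// Existing asynchronous path: the result arrives via resolve/reject callbacks.
rule.run(
  context,
  {},
  function resolve(ruleResult) {
    console.log('async:', ruleResult.nodes.length, 'nodes');
  },
  function reject(err) {
    console.error(err);
  }
);

// New synchronous path: the RuleResult is returned directly.
var ruleResult = rule.runSync(context, {});
console.log('sync:', ruleResult.nodes.length, 'nodes');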
48 changes: 48 additions & 0 deletions lib/core/base/check.js
@@ -104,6 +104,54 @@ Check.prototype.run = function(node, options, context, resolve, reject) {
}
};

/**
 * Run the check's evaluate function (call `this.evaluate(node, options)`) synchronously
 * @param {HTMLElement} node The node to test
 * @param {Object} options The options that override the defaults and provide additional
 * information for the check
 * @param {Context} context The resolved Context object
 * @returns {CheckResult|null} The check result, or null if the check is disabled
 */
Check.prototype.runSync = function(node, options, context) {
options = options || {};
const { enabled = this.enabled } = options;

if (!enabled) {
return null;
}

const checkOptions = options.options || this.options;
const checkResult = new CheckResult(this);
const checkHelper = axe.utils.checkHelper(checkResult, options);

// throw error if a check is run that requires async behavior
checkHelper.async = function() {
throw new Error('Cannot run async check while in a synchronous run');
};

let result;

try {
result = this.evaluate.call(
checkHelper,
node.actualNode,
checkOptions,
node,
context
);
} catch (e) {
// In the "Audit#run: should run all the rules" test, there is no `node` here. I do
// not know if this is intentional or not, so to be safe, we guard against the
// possible reference error.
if (node && node.actualNode) {
// Save a reference to the node we errored on for further debugging.
e.errorNode = new DqElement(node.actualNode).toJSON();
}
throw e;
}

checkResult.result = result;
return checkResult;
};

/**
* Override a check's settings after construction to allow for changing options
* without having to implement the entire check
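A rough sketch of the contract Check#runSync establishes, assuming `virtualNode` is a virtual node obtained from `axe.utils.getFlattenedTree` and using hypothetical check ids:

// Hypothetical checks, for illustration only.
var virtualNode = axe.utils.getFlattenedTree(document.documentElement)[0];

var syncCheck = new Check({
  id: 'example-sync-check',
  evaluate: function(node) {
    return node.hasAttribute('lang');
  }
});
// Returns a CheckResult, or null when the check is disabled via options.
console.log(syncCheck.runSync(virtualNode, {}).result);
console.log(syncCheck.runSync(virtualNode, { enabled: false })); // null

var asyncCheck = new Check({
  id: 'example-async-check',
  evaluate: function() {
    var done = this.async(); // fine under Check#run, throws under Check#runSync
    setTimeout(function() { done(true); }, 10);
  }
});
try {
  asyncCheck.runSync(virtualNode, {});
} catch (e) {
  console.log(e.message); // "Cannot run async check while in a synchronous run"
}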
193 changes: 154 additions & 39 deletions lib/core/base/rule.js
@@ -166,20 +166,41 @@ Rule.prototype.runChecks = function(
.catch(reject);
};

/**
 * Run the checks of a given type ('any', 'all', or 'none') for a rule synchronously.
 * @param {String} type The type of checks to run
 * @param {Object} node The virtual node to test
 * @param {Mixed} options Options specific to this rule
 * @param {Context} context The resolved Context object
 * @returns {Object} Object of the form { type, results }
 */
Rule.prototype.runChecksSync = function(type, node, options, context) {
'use strict';

const self = this;
let results = [];

this[type].forEach(function(c) {
const check = self._audit.checks[c.id || c];
const option = axe.utils.getCheckOption(check, self.id, options);
results.push(check.runSync(node, option, context));
});

results = results.filter(function(check) {
return check;
});

return { type: type, results: results };
};

/**
* Runs the Rule's `evaluate` function
* @param {Context} context The resolved Context object
* @param {Mixed} options Options specific to this rule
 * @param {Function} resolve Function to call when the rule completes; receives a RuleResult instance
 * @param {Function} reject Function to call if the rule fails to run
*/
Rule.prototype.run = function(context, options, resolve, reject) {
Rule.prototype.run = function(context, options = {}, resolve, reject) {
if (options.performanceTimer) {
this._trackPerformance();
}

const q = axe.utils.queue();
const ruleResult = new RuleResult(this);
const markStart = 'mark_rule_start_' + this.id;
const markEnd = 'mark_rule_end_' + this.id;
const markChecksStart = 'mark_runchecks_start_' + this.id;
const markChecksEnd = 'mark_runchecks_end_' + this.id;

let nodes;

try {
@@ -192,14 +213,7 @@ Rule.prototype.run = function(context, options, resolve, reject) {
}

if (options.performanceTimer) {
axe.log(
this.id,
'gather (',
nodes.length,
'):',
axe.utils.performanceTimer.timeElapsed() + 'ms'
);
axe.utils.performanceTimer.mark(markChecksStart);
this._logGatherPerformance(nodes);
}

nodes.forEach(node => {
@@ -214,22 +228,10 @@

checkQueue
.then(function(results) {
if (results.length) {
var hasResults = false,
result = {};
results.forEach(function(r) {
var res = r.results.filter(function(result) {
return result;
});
result[r.type] = res;
if (res.length) {
hasResults = true;
}
});
if (hasResults) {
result.node = new axe.utils.DqElement(node.actualNode, options);
ruleResult.nodes.push(result);
}
const result = getResult(results);
if (result) {
result.node = new axe.utils.DqElement(node.actualNode, options);
ruleResult.nodes.push(result);
}
resolveNode();
})
@@ -242,20 +244,133 @@
q.defer(resolve => setTimeout(resolve, 0));

if (options.performanceTimer) {
axe.utils.performanceTimer.mark(markChecksEnd);
axe.utils.performanceTimer.mark(markEnd);
axe.utils.performanceTimer.measure(
'rule_' + this.id + '#runchecks',
markChecksStart,
markChecksEnd
);

axe.utils.performanceTimer.measure('rule_' + this.id, markStart, markEnd);
this._logRulePerformance();
}

q.then(() => resolve(ruleResult)).catch(error => reject(error));
};

/**
* Runs the Rule's `evaluate` function synchronously
* @param {Context} context The resolved Context object
* @param {Mixed} options Options specific to this rule
 * @returns {RuleResult} The rule result for this run
 */
Rule.prototype.runSync = function(context, options = {}) {
if (options.performanceTimer) {
this._trackPerformance();
}

const ruleResult = new RuleResult(this);
let nodes;

try {
nodes = this.gatherAndMatchNodes(context, options);
} catch (error) {
// Exit the rule execution if matches fails
throw new SupportError({ cause: error, ruleId: this.id });
}

if (options.performanceTimer) {
this._logGatherPerformance(nodes);
}

nodes.forEach(node => {
let results = [];
['any', 'all', 'none'].forEach(type => {
results.push(this.runChecksSync(type, node, options, context));
});

const result = getResult(results);
if (result) {
result.node = new axe.utils.DqElement(node.actualNode, options);
ruleResult.nodes.push(result);
}
});

if (options.performanceTimer) {
this._logRulePerformance();
}

return ruleResult;
};

/**
* Add performance tracking properties to the rule
* @private
*/
Rule.prototype._trackPerformance = function() {
this._markStart = 'mark_rule_start_' + this.id;
this._markEnd = 'mark_rule_end_' + this.id;
this._markChecksStart = 'mark_runchecks_start_' + this.id;
this._markChecksEnd = 'mark_runchecks_end_' + this.id;
};

/**
* Log performance of rule.gather
* @private
 * @param {Array} nodes Result of rule.gather
*/
Rule.prototype._logGatherPerformance = function(nodes) {
axe.log(
'gather (',
nodes.length,
'):',
axe.utils.performanceTimer.timeElapsed() + 'ms'
);
axe.utils.performanceTimer.mark(this._markChecksStart);
};

/**
* Log performance of the rule
* @private
*/
Rule.prototype._logRulePerformance = function() {
axe.utils.performanceTimer.mark(this._markChecksEnd);
axe.utils.performanceTimer.mark(this._markEnd);
axe.utils.performanceTimer.measure(
'runchecks_' + this.id,
this._markChecksStart,
this._markChecksEnd
);

axe.utils.performanceTimer.measure(
'rule_' + this.id,
this._markStart,
this._markEnd
);
};
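Because `_trackPerformance` and `_logRulePerformance` only wrap `axe.utils.performanceTimer.mark`/`measure`, the resulting entries should be readable from the performance timeline once a rule runs with `performanceTimer: true`. A sketch, reusing `rule` and `context` from the first example and assuming the timer delegates to `window.performance`:

// Assumption: axe.utils.performanceTimer records its marks and measures on window.performance.
rule.runSync(context, { performanceTimer: true });

// One measure spans the whole rule, another just its checks.
['rule_' + rule.id, 'runchecks_' + rule.id].forEach(function(name) {
  performance.getEntriesByName(name, 'measure').forEach(function(entry) {
    console.log(entry.name, entry.duration.toFixed(1) + 'ms');
  });
});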

/**
 * Process the results of each check type and return the combined result
 * object if at least one check produced a result
 * @private
 * @param {Array} results Array of check results, one entry per check type
 * @returns {Object|null} Results keyed by check type, or null if no check produced a result
 */
function getResult(results) {
if (results.length) {
let hasResults = false,
result = {};
results.forEach(function(r) {
const res = r.results.filter(function(result) {
return result;
});
result[r.type] = res;
if (res.length) {
hasResults = true;
}
});

if (hasResults) {
return result;
}

return null;
}
}
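To make the data shape concrete, a sketch of the input `getResult` receives (one entry per check type) and the object it folds them into; the check results are placeholders:

// Placeholder check results, for illustration only.
var results = [
  { type: 'any', results: [{ id: 'has-lang', result: true }, null] },
  { type: 'all', results: [] },
  { type: 'none', results: [null] }
];

// Falsy entries are filtered out per type; null comes back only when every
// type ends up empty (and undefined when `results` itself is empty).
var folded = getResult(results);
// folded => { any: [{ id: 'has-lang', result: true }], all: [], none: [] }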

/**
* Selects `HTMLElement`s based on configured selector and filters them based on
* the rules matches function