Integrate new engine, correct old specs

Corrects edge cases with:

- full quotes
- [url] with nested tags
- engine overrides
- onebox applying to non-http srcs

parent ee470b5317
commit f1b38ba4fb
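The edge cases above are easiest to see through the cooking API exercised by the updated tests. A rough sketch, assuming the usual `pretty-text/pretty-text` import used by the test suite; exact output markup (e.g. `rel` attributes added server side) may differ:

```javascript
import PrettyText, { buildOptions } from 'pretty-text/pretty-text';

const pt = new PrettyText(buildOptions({ siteSettings: {} }));

// [url] with nested tags: the inner bbcode markup stays inside the anchor
pt.cook("[url=http://sam.com][b]I am sam[/b][/url]");
// -> <p><a href="http://sam.com" data-bbcode="true"><span class="bbcode-b">I am sam</span></a></p>

// onebox applying to non-http srcs: a [url] around something that is not an
// http(s) link is stripped instead of auto-linked, so it never gains the onebox class
pt.cook("[url]site.com[/url]"); // output contains no "onebox"
```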
@@ -227,10 +227,11 @@ export function setup(opts, siteSettings, state) {
   opts.markdownIt = true;
   opts.setup = true;

-  if (!opts.discourse.sanitizer) {
+  if (!opts.discourse.sanitizer || !opts.sanitizer) {
     const whiteLister = new WhiteLister(opts.discourse);
     opts.sanitizer = opts.discourse.sanitizer = (!!opts.discourse.sanitize) ? a=>sanitize(a, whiteLister) : a=>a;
   }
 }

 export function cook(raw, opts) {

@@ -1,597 +0,0 @@
import guid from 'pretty-text/guid';
|
|
||||||
import { default as WhiteLister, whiteListFeature } from 'pretty-text/white-lister';
|
|
||||||
import { escape } from 'pretty-text/sanitizer';
|
|
||||||
|
|
||||||
var parser = window.BetterMarkdown,
|
|
||||||
MD = parser.Markdown,
|
|
||||||
DialectHelpers = parser.DialectHelpers,
|
|
||||||
hoisted;
|
|
||||||
|
|
||||||
let currentOpts;
|
|
||||||
|
|
||||||
const emitters = [];
|
|
||||||
const preProcessors = [];
|
|
||||||
const parseNodes = [];
|
|
||||||
|
|
||||||
function findEndPos(text, start, stop, args, offset) {
|
|
||||||
let endPos, nextStart;
|
|
||||||
do {
|
|
||||||
endPos = text.indexOf(stop, offset);
|
|
||||||
if (endPos === -1) { return -1; }
|
|
||||||
nextStart = text.indexOf(start, offset);
|
|
||||||
offset = endPos + stop.length;
|
|
||||||
} while (nextStart !== -1 && nextStart < endPos);
|
|
||||||
return endPos;
|
|
||||||
}
|
|
||||||
|
|
||||||
class DialectHelper {
|
|
||||||
constructor() {
|
|
||||||
this._dialect = MD.dialects.Discourse = DialectHelpers.subclassDialect(MD.dialects.Gruber);
|
|
||||||
this._setup = false;
|
|
||||||
}
|
|
||||||
|
|
||||||
escape(str) {
|
|
||||||
return escape(str);
|
|
||||||
}
|
|
||||||
|
|
||||||
getOptions() {
|
|
||||||
return currentOpts;
|
|
||||||
}
|
|
||||||
|
|
||||||
registerInlineFeature(featureName, start, fn) {
|
|
||||||
this._dialect.inline[start] = function() {
|
|
||||||
if (!currentOpts.features[featureName]) { return; }
|
|
||||||
return fn.apply(this, arguments);
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
addPreProcessorFeature(featureName, fn) {
|
|
||||||
preProcessors.push(raw => {
|
|
||||||
if (!currentOpts.features[featureName]) { return raw; }
|
|
||||||
return fn(raw, hoister);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
The simplest kind of replacement possible. Replace a string token with JsonML.
|
|
||||||
|
|
||||||
For example, to replace all occurrences of :) with a smile image:
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
helper.inlineReplace(':)', text => ['img', {src: '/images/smile.png'}]);
|
|
||||||
```
|
|
||||||
**/
|
|
||||||
inlineReplaceFeature(featureName, token, emitter) {
|
|
||||||
this.registerInline(token, (text, match, prev) => {
|
|
||||||
if (!currentOpts.features[featureName]) { return; }
|
|
||||||
return [token.length, emitter.call(this, token, match, prev)];
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
After the parser has been executed, change the contents of an HTML tag.
|
|
||||||
|
|
||||||
Let's say you want to replace the contents of all code tags to prepend
|
|
||||||
"EVIL TROUT HACKED YOUR CODE!":
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
helper.postProcessTag('code', contents => `EVIL TROUT HACKED YOUR CODE!\n\n${contents}`);
|
|
||||||
```
|
|
||||||
**/
|
|
||||||
postProcessTagFeature(featureName, tag, emitter) {
|
|
||||||
this.onParseNode(event => {
|
|
||||||
if (!currentOpts.features[featureName]) { return; }
|
|
||||||
const node = event.node;
|
|
||||||
if (node[0] === tag) {
|
|
||||||
node[node.length-1] = emitter(node[node.length-1]);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
Matches inline using a regular expression. The emitter function is passed
|
|
||||||
the matches from the regular expression.
|
|
||||||
|
|
||||||
For example, this auto links URLs:
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
helper.inlineRegexp({
|
|
||||||
matcher: /((?:https?:(?:\/{1,3}|[a-z0-9%])|www\d{0,3}[.])(?:[^\s()<>]+|\([^\s()<>]+\))+(?:\([^\s()<>]+\)|[^`!()\[\]{};:'".,<>?«»“”‘’\s]))/gm,
|
|
||||||
spaceBoundary: true,
|
|
||||||
start: 'http',
|
|
||||||
|
|
||||||
emitter(matches) {
|
|
||||||
const url = matches[1];
|
|
||||||
return ['a', {href: url}, url];
|
|
||||||
}
|
|
||||||
});
|
|
||||||
```
|
|
||||||
**/
|
|
||||||
inlineRegexpFeature(featureName, args) {
|
|
||||||
this.registerInline(args.start, function(text, match, prev) {
|
|
||||||
if (!currentOpts.features[featureName]) { return; }
|
|
||||||
if (invalidBoundary(args, prev)) { return; }
|
|
||||||
|
|
||||||
args.matcher.lastIndex = 0;
|
|
||||||
const m = args.matcher.exec(text);
|
|
||||||
if (m) {
|
|
||||||
const result = args.emitter.call(this, m);
|
|
||||||
if (result) {
|
|
||||||
return [m[0].length, result];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
Handles inline replacements surrounded by tokens.
|
|
||||||
|
|
||||||
For example, to handle markdown style bold. Note we use `concat` on the array because
|
|
||||||
the contents are JsonML too since we didn't pass `rawContents` as true. This supports
|
|
||||||
recursive markup.
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
helper.inlineBetween({
|
|
||||||
between: '**',
|
|
||||||
wordBoundary: true,
|
|
||||||
emitter(contents) {
|
|
||||||
return ['strong'].concat(contents);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
```
|
|
||||||
**/
|
|
||||||
inlineBetweenFeature(featureName, args) {
|
|
||||||
const start = args.start || args.between;
|
|
||||||
const stop = args.stop || args.between;
|
|
||||||
const startLength = start.length;
|
|
||||||
|
|
||||||
this.registerInline(start, function(text, match, prev) {
|
|
||||||
if (!currentOpts.features[featureName]) { return; }
|
|
||||||
if (invalidBoundary(args, prev)) { return; }
|
|
||||||
|
|
||||||
const endPos = findEndPos(text, start, stop, args, startLength);
|
|
||||||
if (endPos === -1) { return; }
|
|
||||||
var between = text.slice(startLength, endPos);
|
|
||||||
|
|
||||||
// If rawContents is set, don't process inline
|
|
||||||
if (!args.rawContents) {
|
|
||||||
between = this.processInline(between);
|
|
||||||
}
|
|
||||||
|
|
||||||
var contents = args.emitter.call(this, between);
|
|
||||||
if (contents) {
|
|
||||||
return [endPos+stop.length, contents];
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
Replaces a block of text between a start and stop. As opposed to inline, these
|
|
||||||
might span multiple lines.
|
|
||||||
|
|
||||||
Here's an example that takes the content between `[code]` ... `[/code]` and
|
|
||||||
puts them inside a `pre` tag:
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
helper.replaceBlock({
|
|
||||||
start: /(\[code\])([\s\S]*)/igm,
|
|
||||||
stop: '[/code]',
|
|
||||||
rawContents: true,
|
|
||||||
|
|
||||||
emitter(blockContents) {
|
|
||||||
return ['p', ['pre'].concat(blockContents)];
|
|
||||||
}
|
|
||||||
});
|
|
||||||
```
|
|
||||||
**/
|
|
||||||
replaceBlockFeature(featureName, args) {
|
|
||||||
function blockFunc(block, next) {
|
|
||||||
if (!currentOpts.features[featureName]) { return; }
|
|
||||||
|
|
||||||
const linebreaks = currentOpts.traditionalMarkdownLinebreaks;
|
|
||||||
if (linebreaks && args.skipIfTradtionalLinebreaks) { return; }
|
|
||||||
|
|
||||||
args.start.lastIndex = 0;
|
|
||||||
const result = [];
|
|
||||||
const match = (args.start).exec(block);
|
|
||||||
if (!match) { return; }
|
|
||||||
|
|
||||||
const lastChance = () => !next.some(blk => blk.match(args.stop));
|
|
||||||
|
|
||||||
// shave off start tag and leading text, if any.
|
|
||||||
const pos = args.start.lastIndex - match[0].length;
|
|
||||||
const leading = block.slice(0, pos);
|
|
||||||
const trailing = match[2] ? match[2].replace(/^\n*/, "") : "";
|
|
||||||
|
|
||||||
// The other leading block should be processed first! eg a code block wrapped around a code block.
|
|
||||||
if (args.withoutLeading && args.withoutLeading.test(leading)) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// just give up if there's no stop tag in this or any next block
|
|
||||||
args.stop.lastIndex = block.length - trailing.length;
|
|
||||||
if (!args.stop.exec(block) && lastChance()) { return; }
|
|
||||||
if (leading.length > 0) {
|
|
||||||
var parsedLeading = this.processBlock(MD.mk_block(leading), []);
|
|
||||||
if (parsedLeading && parsedLeading[0]) {
|
|
||||||
result.push(parsedLeading[0]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (trailing.length > 0) {
|
|
||||||
next.unshift(MD.mk_block(trailing, block.trailing,
|
|
||||||
block.lineNumber + countLines(leading) + (match[2] ? match[2].length : 0) - trailing.length));
|
|
||||||
}
|
|
||||||
|
|
||||||
// go through the available blocks to find the matching stop tag.
|
|
||||||
const contentBlocks = [];
|
|
||||||
let nesting = 0;
|
|
||||||
let actualEndPos = -1;
|
|
||||||
let currentBlock;
|
|
||||||
|
|
||||||
blockloop:
|
|
||||||
while (currentBlock = next.shift()) {
|
|
||||||
|
|
||||||
// collect all the start and stop tags in the current block
|
|
||||||
args.start.lastIndex = 0;
|
|
||||||
const startPos = [];
|
|
||||||
let m;
|
|
||||||
while (m = (args.start).exec(currentBlock)) {
|
|
||||||
startPos.push(args.start.lastIndex - m[0].length);
|
|
||||||
args.start.lastIndex = args.start.lastIndex - (m[2] ? m[2].length : 0);
|
|
||||||
}
|
|
||||||
args.stop.lastIndex = 0;
|
|
||||||
const endPos = [];
|
|
||||||
while (m = (args.stop).exec(currentBlock)) {
|
|
||||||
endPos.push(args.stop.lastIndex - m[0].length);
|
|
||||||
}
|
|
||||||
|
|
||||||
// go through the available end tags:
|
|
||||||
let ep = 0;
|
|
||||||
let sp = 0;
|
|
||||||
while (ep < endPos.length) {
|
|
||||||
if (sp < startPos.length && startPos[sp] < endPos[ep]) {
|
|
||||||
// there's an end tag, but there's also another start tag first. we need to go deeper.
|
|
||||||
sp++; nesting++;
|
|
||||||
} else if (nesting > 0) {
|
|
||||||
// found an end tag, but we must go up a level first.
|
|
||||||
ep++; nesting--;
|
|
||||||
} else {
|
|
||||||
// found an end tag and we're at the top: done! -- or: start tag and end tag are
|
|
||||||
// identical, (i.e. startPos[sp] == endPos[ep]), so we don't do nesting at all.
|
|
||||||
actualEndPos = endPos[ep];
|
|
||||||
break blockloop;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (lastChance()) {
|
|
||||||
// when lastChance() becomes true the first time, currentBlock contains the last
|
|
||||||
// end tag available in the input blocks but it's not on the right nesting level
|
|
||||||
// or we would have terminated the loop already. the only thing we can do is to
|
|
||||||
// treat the last available end tag as though it were matched with our start tag
|
|
||||||
// and let the emitter figure out how to render the garbage inside.
|
|
||||||
actualEndPos = endPos[endPos.length - 1];
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
// any left-over start tags still increase the nesting level
|
|
||||||
nesting += startPos.length - sp;
|
|
||||||
contentBlocks.push(currentBlock);
|
|
||||||
}
|
|
||||||
|
|
||||||
const stopLen = currentBlock.match(args.stop)[0].length;
|
|
||||||
const before = currentBlock.slice(0, actualEndPos).replace(/\n*$/, "");
|
|
||||||
const after = currentBlock.slice(actualEndPos + stopLen).replace(/^\n*/, "");
|
|
||||||
if (before.length > 0) contentBlocks.push(MD.mk_block(before, "", currentBlock.lineNumber));
|
|
||||||
if (after.length > 0) next.unshift(MD.mk_block(after, currentBlock.trailing, currentBlock.lineNumber + countLines(before)));
|
|
||||||
|
|
||||||
const emitterResult = args.emitter.call(this, contentBlocks, match);
|
|
||||||
if (emitterResult) { result.push(emitterResult); }
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
|
|
||||||
if (args.priority) {
|
|
||||||
blockFunc.priority = args.priority;
|
|
||||||
}
|
|
||||||
|
|
||||||
this.registerBlock(args.start.toString(), blockFunc);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
After the parser has been executed, post process any text nodes in the HTML document.
|
|
||||||
This is useful if you want to apply a transformation to the text.
|
|
||||||
|
|
||||||
If you are generating HTML from the text, it is preferable to use the replacer
|
|
||||||
functions and do it in the parsing part of the pipeline. This function is best for
|
|
||||||
simple transformations or transformations that have to happen after all earlier
|
|
||||||
processing is done.
|
|
||||||
|
|
||||||
For example, to convert all text to upper case:
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
helper.postProcessText(function (text) {
|
|
||||||
return text.toUpperCase();
|
|
||||||
});
|
|
||||||
```
|
|
||||||
**/
|
|
||||||
postProcessTextFeature(featureName, fn) {
|
|
||||||
emitters.push(function () {
|
|
||||||
if (!currentOpts.features[featureName]) { return; }
|
|
||||||
return fn.apply(this, arguments);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
onParseNodeFeature(featureName, fn) {
|
|
||||||
parseNodes.push(function () {
|
|
||||||
if (!currentOpts.features[featureName]) { return; }
|
|
||||||
return fn.apply(this, arguments);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
registerBlockFeature(featureName, name, fn) {
|
|
||||||
const blockFunc = function() {
|
|
||||||
if (!currentOpts.features[featureName]) { return; }
|
|
||||||
return fn.apply(this, arguments);
|
|
||||||
};
|
|
||||||
|
|
||||||
blockFunc.priority = fn.priority;
|
|
||||||
this._dialect.block[name] = blockFunc;
|
|
||||||
}
|
|
||||||
|
|
||||||
applyFeature(featureName, module) {
|
|
||||||
helper.registerInline = (code, fn) => helper.registerInlineFeature(featureName, code, fn);
|
|
||||||
helper.replaceBlock = args => helper.replaceBlockFeature(featureName, args);
|
|
||||||
helper.addPreProcessor = fn => helper.addPreProcessorFeature(featureName, fn);
|
|
||||||
helper.inlineReplace = (token, emitter) => helper.inlineReplaceFeature(featureName, token, emitter);
|
|
||||||
helper.postProcessTag = (token, emitter) => helper.postProcessTagFeature(featureName, token, emitter);
|
|
||||||
helper.inlineRegexp = args => helper.inlineRegexpFeature(featureName, args);
|
|
||||||
helper.inlineBetween = args => helper.inlineBetweenFeature(featureName, args);
|
|
||||||
helper.postProcessText = fn => helper.postProcessTextFeature(featureName, fn);
|
|
||||||
helper.onParseNode = fn => helper.onParseNodeFeature(featureName, fn);
|
|
||||||
helper.registerBlock = (name, fn) => helper.registerBlockFeature(featureName, name, fn);
|
|
||||||
|
|
||||||
module.setup(this);
|
|
||||||
}
|
|
||||||
|
|
||||||
setup() {
|
|
||||||
if (this._setup) { return; }
|
|
||||||
this._setup = true;
|
|
||||||
|
|
||||||
Object.keys(require._eak_seen).forEach(entry => {
|
|
||||||
if (entry.indexOf('discourse-markdown') !== -1) {
|
|
||||||
const module = requirejs(entry);
|
|
||||||
if (module && module.setup) {
|
|
||||||
const featureName = entry.split('/').reverse()[0];
|
|
||||||
helper.whiteList = info => whiteListFeature(featureName, info);
|
|
||||||
|
|
||||||
this.applyFeature(featureName, module);
|
|
||||||
helper.whiteList = undefined;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
MD.buildBlockOrder(this._dialect.block);
|
|
||||||
var index = this._dialect.block.__order__.indexOf("code");
|
|
||||||
if (index > -1) {
|
|
||||||
this._dialect.block.__order__.splice(index, 1);
|
|
||||||
this._dialect.block.__order__.unshift("code");
|
|
||||||
}
|
|
||||||
MD.buildInlinePatterns(this._dialect.inline);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const helper = new DialectHelper();
|
|
||||||
|
|
||||||
export function cook(raw, opts) {
|
|
||||||
currentOpts = opts;
|
|
||||||
|
|
||||||
hoisted = {};
|
|
||||||
|
|
||||||
if (!currentOpts.enableExperimentalMarkdownIt) {
|
|
||||||
raw = hoistCodeBlocksAndSpans(raw);
|
|
||||||
preProcessors.forEach(p => raw = p(raw));
|
|
||||||
}
|
|
||||||
|
|
||||||
const whiteLister = new WhiteLister(opts);
|
|
||||||
|
|
||||||
let result;
|
|
||||||
|
|
||||||
if (currentOpts.enableExperimentalMarkdownIt) {
|
|
||||||
result = opts.sanitizer(
|
|
||||||
requirejs('pretty-text/engines/markdown-it/instance').default(opts).render(raw),
|
|
||||||
whiteLister
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
const tree = parser.toHTMLTree(raw, 'Discourse');
|
|
||||||
result = opts.sanitizer(parser.renderJsonML(parseTree(tree, opts)), whiteLister);
|
|
||||||
}
|
|
||||||
|
|
||||||
// If we hoisted out anything, put it back
|
|
||||||
const keys = Object.keys(hoisted);
|
|
||||||
if (keys.length) {
|
|
||||||
let found = true;
|
|
||||||
|
|
||||||
const unhoist = function(key) {
|
|
||||||
result = result.replace(new RegExp(key, "g"), function() {
|
|
||||||
found = true;
|
|
||||||
return hoisted[key];
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
while (found) {
|
|
||||||
found = false;
|
|
||||||
keys.forEach(unhoist);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return result.trim();
|
|
||||||
}
|
|
||||||
|
|
||||||
export function setup() {
|
|
||||||
helper.setup();
|
|
||||||
}
|
|
||||||
|
|
||||||
function processTextNodes(node, event, emitter) {
|
|
||||||
if (node.length < 2) { return; }
|
|
||||||
|
|
||||||
if (node[0] === '__RAW') {
|
|
||||||
const hash = guid();
|
|
||||||
hoisted[hash] = node[1];
|
|
||||||
node[1] = hash;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
for (var j=1; j<node.length; j++) {
|
|
||||||
var textContent = node[j];
|
|
||||||
if (typeof textContent === "string") {
|
|
||||||
var result = emitter(textContent, event);
|
|
||||||
if (result) {
|
|
||||||
if (result instanceof Array) {
|
|
||||||
node.splice.apply(node, [j, 1].concat(result));
|
|
||||||
} else {
|
|
||||||
node[j] = result;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
node[j] = textContent;
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse a JSON ML tree, using registered handlers to adjust it if necessary.
|
|
||||||
function parseTree(tree, options, path, insideCounts) {
|
|
||||||
|
|
||||||
if (tree instanceof Array) {
|
|
||||||
const event = {node: tree, options, path, insideCounts: insideCounts || {}};
|
|
||||||
parseNodes.forEach(fn => fn(event));
|
|
||||||
|
|
||||||
for (var j=0; j<emitters.length; j++) {
|
|
||||||
processTextNodes(tree, event, emitters[j]);
|
|
||||||
}
|
|
||||||
|
|
||||||
path = path || [];
|
|
||||||
insideCounts = insideCounts || {};
|
|
||||||
|
|
||||||
path.push(tree);
|
|
||||||
|
|
||||||
for (var i=1; i<tree.length; i++) {
|
|
||||||
var n = tree[i],
|
|
||||||
tagName = n[0];
|
|
||||||
|
|
||||||
insideCounts[tagName] = (insideCounts[tagName] || 0) + 1;
|
|
||||||
|
|
||||||
if (n && n.length === 2 && n[0] === "p" && /^<!--([\s\S]*)-->$/.exec(n[1])) {
|
|
||||||
// Remove paragraphs around comment-only nodes.
|
|
||||||
tree[i] = n[1];
|
|
||||||
} else {
|
|
||||||
parseTree(n, options, path, insideCounts);
|
|
||||||
}
|
|
||||||
|
|
||||||
insideCounts[tagName] = insideCounts[tagName] - 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
// If raw nodes are in paragraphs, pull them up
|
|
||||||
if (tree.length === 2 && tree[0] === 'p' && tree[1] instanceof Array && tree[1][0] === "__RAW") {
|
|
||||||
var text = tree[1][1];
|
|
||||||
tree[0] = "__RAW";
|
|
||||||
tree[1] = text;
|
|
||||||
}
|
|
||||||
|
|
||||||
path.pop();
|
|
||||||
}
|
|
||||||
return tree;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns true if there's an invalid word boundary for a match.
|
|
||||||
function invalidBoundary(args, prev) {
|
|
||||||
if (!(args.wordBoundary || args.spaceBoundary || args.spaceOrTagBoundary)) { return false; }
|
|
||||||
|
|
||||||
var last = prev[prev.length - 1];
|
|
||||||
if (typeof last !== "string") { return false; }
|
|
||||||
|
|
||||||
if (args.wordBoundary && (!last.match(/\W$/))) { return true; }
|
|
||||||
if (args.spaceBoundary && (!last.match(/\s$/))) { return true; }
|
|
||||||
if (args.spaceOrTagBoundary && (!last.match(/(\s|\>|\()$/))) { return true; }
|
|
||||||
}
|
|
||||||
|
|
||||||
function countLines(str) {
|
|
||||||
let index = -1, count = 0;
|
|
||||||
while ((index = str.indexOf("\n", index + 1)) !== -1) { count++; }
|
|
||||||
return count;
|
|
||||||
}
|
|
||||||
|
|
||||||
function hoister(t, target, replacement) {
|
|
||||||
const regexp = new RegExp(target.replace(/[-\/\\^$*+?.()|[\]{}]/g, '\\$&'), "g");
|
|
||||||
if (t.match(regexp)) {
|
|
||||||
const hash = guid();
|
|
||||||
t = t.replace(regexp, hash);
|
|
||||||
hoisted[hash] = replacement;
|
|
||||||
}
|
|
||||||
return t;
|
|
||||||
}
|
|
||||||
|
|
||||||
function outdent(t) {
|
|
||||||
return t.replace(/^([ ]{4}|\t)/gm, "");
|
|
||||||
}
|
|
||||||
|
|
||||||
function removeEmptyLines(t) {
|
|
||||||
return t.replace(/^\n+/, "").replace(/\s+$/, "");
|
|
||||||
}
|
|
||||||
|
|
||||||
function hideBackslashEscapedCharacters(t) {
|
|
||||||
return t.replace(/\\\\/g, "\u1E800").replace(/\\`/g, "\u1E8001");
|
|
||||||
}
|
|
||||||
|
|
||||||
function showBackslashEscapedCharacters(t) {
|
|
||||||
return t.replace(/\u1E8001/g, "\\`").replace(/\u1E800/g, "\\\\");
|
|
||||||
}
|
|
||||||
|
|
||||||
function hoistCodeBlocksAndSpans(text) {
|
|
||||||
// replace all "\`" with a single character
|
|
||||||
text = hideBackslashEscapedCharacters(text);
|
|
||||||
|
|
||||||
// /!\ the order is important /!\
|
|
||||||
|
|
||||||
// fenced code blocks (AKA GitHub code blocks)
|
|
||||||
text = text.replace(/(^\n*|\n)```([a-z0-9\-]*)\n([\s\S]*?)\n```/g, function(_, before, language, content) {
|
|
||||||
const hash = guid();
|
|
||||||
hoisted[hash] = escape(showBackslashEscapedCharacters(removeEmptyLines(content)));
|
|
||||||
return before + "```" + language + "\n" + hash + "\n```";
|
|
||||||
});
|
|
||||||
|
|
||||||
// markdown code blocks
|
|
||||||
text = text.replace(/(^\n*|\n\n)((?:(?:[ ]{4}|\t).*\n*)+)/g, function(match, before, content, index) {
|
|
||||||
// make sure we aren't in a list
|
|
||||||
var previousLine = text.slice(0, index).trim().match(/.*$/);
|
|
||||||
if (previousLine && previousLine[0].length) {
|
|
||||||
previousLine = previousLine[0].trim();
|
|
||||||
if (/^(?:\*|\+|-|\d+\.)\s+/.test(previousLine)) {
|
|
||||||
return match;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// we can safely hoist the code block
|
|
||||||
const hash = guid();
|
|
||||||
hoisted[hash] = escape(outdent(showBackslashEscapedCharacters(removeEmptyLines(content))));
|
|
||||||
return before + " " + hash + "\n";
|
|
||||||
});
|
|
||||||
|
|
||||||
// <pre>...</pre> code blocks
|
|
||||||
text = text.replace(/(\s|^)<pre>([\s\S]*?)<\/pre>/ig, function(_, before, content) {
|
|
||||||
const hash = guid();
|
|
||||||
hoisted[hash] = escape(showBackslashEscapedCharacters(removeEmptyLines(content)));
|
|
||||||
return before + "<pre>" + hash + "</pre>";
|
|
||||||
});
|
|
||||||
|
|
||||||
// code spans (double & single `)
|
|
||||||
["``", "`"].forEach(function(delimiter) {
|
|
||||||
var regexp = new RegExp("(^|[^`])" + delimiter + "([^`\\n]+?)" + delimiter + "([^`]|$)", "g");
|
|
||||||
text = text.replace(regexp, function(_, before, content, after) {
|
|
||||||
const hash = guid();
|
|
||||||
hoisted[hash] = escape(showBackslashEscapedCharacters(content.trim()));
|
|
||||||
return before + delimiter + hash + delimiter + after;
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
// replace back all weird characters with "\`"
|
|
||||||
return showBackslashEscapedCharacters(text);
|
|
||||||
}

@@ -57,6 +57,7 @@ function tokanizeBBCode(state, silent, ruler) {

   let token = state.push('text', '' , 0);
   token.content = state.src.slice(pos, pos+tagInfo.length);
+  token.meta = 'bbcode';

   state.delimiters.push({
     bbInfo: tagInfo,
@@ -105,10 +106,15 @@ function processBBCode(state, silent) {
     let tag, className;

     if (typeof tagInfo.rule.wrap === 'function') {
-      if (!tagInfo.rule.wrap(token, tagInfo)) {
-        return false;
-      }
-      tag = token.tag;
+      let content = "";
+      for (let j = startDelim.token+1; j < endDelim.token; j++) {
+        let inner = state.tokens[j];
+        if (inner.type === 'text' && inner.meta !== 'bbcode') {
+          content += inner.content;
+        }
+      }
+
+      tagInfo.rule.wrap(token, state.tokens[endDelim.token], tagInfo, content);
+      continue;
     } else {
       let split = tagInfo.rule.wrap.split('.');
       tag = split[0];
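With this change a rule's `wrap` callback receives the start and end delimiter tokens plus the plain text collected between them. A minimal hypothetical rule using that four-argument signature, written in the same `setup(helper)` context where `ruler` is available (the real `[url]` rule in the next hunk is the concrete instance from this commit; the tag and class names here are illustrative only):

```javascript
// hypothetical [sample]...[/sample] rule; `content` is the text gathered
// between the two delimiter tokens and is unused in this sketch
ruler.push('sample', {
  tag: 'sample',
  wrap: function(startToken, endToken, tagInfo, content) {
    // turn the opening/closing bbcode text tokens into a real span pair
    startToken.type = 'span_open';
    startToken.tag = 'span';
    startToken.attrs = [['class', 'bbcode-sample']];
    startToken.content = '';
    startToken.nesting = 1;

    endToken.type = 'span_close';
    endToken.tag = 'span';
    endToken.content = '';
    endToken.nesting = -1;

    return false;
  }
});
```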
@@ -160,19 +166,35 @@ export function setup(helper) {
       }
     });

+    const simpleUrlRegex = /^http[s]?:\/\//;
     ruler.push('url', {
       tag: 'url',
-      replace: function(state, tagInfo, content) {
-        let token;
-
-        token = state.push('link_open', 'a', 1);
-        token.attrs = [['href', content], ['data-bbcode', 'true']];
-
-        token = state.push('text', '', 0);
-        token.content = content;
-
-        token = state.push('link_close', 'a', -1);
-        return true;
+      wrap: function(startToken, endToken, tagInfo, content) {
+        const url = (tagInfo.attrs['_default'] || content).trim();
+
+        if (simpleUrlRegex.test(url)) {
+          startToken.type = 'link_open';
+          startToken.tag = 'a';
+          startToken.attrs = [['href', url], ['data-bbcode', 'true']];
+          startToken.content = '';
+          startToken.nesting = 1;
+
+          endToken.type = 'link_close';
+          endToken.tag = 'a';
+          endToken.content = '';
+          endToken.nesting = -1;
+        } else {
+          // just strip the bbcode tag
+          endToken.content = '';
+          startToken.content = '';
+
+          // edge case, we don't want this detected as a onebox if auto linked
+          // this ensures it is not stripped
+          startToken.type = 'html_inline';
+        }
+
+        return false;
       }
     });
@@ -180,9 +202,10 @@ export function setup(helper) {
       tag: 'email',
       replace: function(state, tagInfo, content) {
         let token;
+        let email = tagInfo.attrs['_default'] || content;

         token = state.push('link_open', 'a', 1);
-        token.attrs = [['href', 'mailto:' + content], ['data-bbcode', 'true']];
+        token.attrs = [['href', 'mailto:' + email], ['data-bbcode', 'true']];

         token = state.push('text', '', 0);
         token.content = content;
@@ -22,6 +22,7 @@ function applyOnebox(state, silent) {
       if (j === 0 && token.leading_space) {
         continue;
       } else if (j > 0) {
+
         let prevSibling = token.children[j-1];

         if (prevSibling.tag !== 'br' || prevSibling.leading_space) {
@@ -45,8 +46,12 @@ function applyOnebox(state, silent) {
         continue;
       }

-      // we already know text matches cause it is an auto link
+      // edge case ... what if this is not http or protocoless?
+      if (!/^http|^\/\//i.test(attrs[0][1])) {
+        continue;
+      }
+
+      // we already know text matches cause it is an auto link
       if (!close || close.type !== "link_close") {
         continue;
       }
@@ -71,6 +76,7 @@ function applyOnebox(state, silent) {
       } else {
         // decorate...
         attrs.push(["class", "onebox"]);
+        attrs.push(["target", "_blank"]);
       }
     }
   }
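To illustrate the guard added above (a throwaway sketch, not part of the commit), only http(s) and protocol-relative hrefs stay eligible for onebox treatment:

```javascript
const oneboxCandidate = href => /^http|^\/\//i.test(href);

oneboxCandidate("http://example.com/page");  // true
oneboxCandidate("//example.com/page");       // true  (protocol-relative)
oneboxCandidate("mailto:sam@sam.com");       // false - matches the new spec that an
                                             // auto-linked email address never oneboxes
```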
@@ -26,7 +26,7 @@ const rule = {
         continue;
       }

-      if (split[i].indexOf(/full:\s*true/) === 0) {
+      if (/full:\s*true/.test(split[i])) {
         full = true;
         continue;
       }
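For reference, the kind of attribute segments this loop walks look like the following. With the regex test, `full:true` is now recognised wherever it appears in the list rather than only when the segment starts with it exactly. This is a sketch (the split delimiter is an assumption); the observable outcome matches the updated spec, where the rendered aside gains `data-full="true"`:

```javascript
// e.g. attributes from [quote="user, post:123, topic:456, full:true"]
const split = "user, post:123, topic:456, full:true".split(/,\s*/);
let full = false;

split.forEach(segment => {
  if (/full:\s*true/.test(segment)) { full = true; }
});
// full === true -> the quote aside is rendered with data-full="true"
```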
@@ -1,12 +1,9 @@
 import { cook as cookIt, setup as setupIt } from 'pretty-text/engines/discourse-markdown-it';
-import { sanitize } from 'pretty-text/sanitizer';
-import WhiteLister from 'pretty-text/white-lister';

-const _registerFns = [];
-const identity = value => value;
-
-export function registerOption(fn) {
-  _registerFns.push(fn);
+export function registerOption() {
+  if (window.console) {
+    window.console.log("registerOption is deprecated");
+  }
 }

 export function buildOptions(state) {
@@ -24,7 +21,7 @@ export function buildOptions(state) {
     emojiUnicodeReplacer
   } = state;

-  const features = {
+  let features = {
     'bold-italics': true,
     'auto-link': true,
     'mentions': true,
@@ -36,6 +33,10 @@ export function buildOptions(state) {
     'newline': !siteSettings.traditional_markdown_linebreaks
   };

+  if (state.features) {
+    features = _.merge(features, state.features);
+  }
+
   const options = {
     sanitize: true,
     getURL,
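A short sketch of what this merge enables (a hypothetical caller, not from the commit; `buildOptions` here is the function being modified above): test or plugin code can hand feature overrides straight to the options builder.

```javascript
// defaults come from siteSettings; the caller flips individual features off
const opts = buildOptions({
  siteSettings: {},
  features: { 'auto-link': false } // merged over the defaults via _.merge
});
```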
@@ -54,6 +55,8 @@ export function buildOptions(state) {
     markdownIt: true
   };

+  // note, this will mutate options due to the way the API is designed
+  // may need a refactor
   setupIt(options, siteSettings, state);

   return options;
@@ -61,9 +64,14 @@ export function buildOptions(state) {

 export default class {
   constructor(opts) {
-    this.opts = opts || {};
-    this.opts.features = this.opts.features || {};
-    this.opts.sanitizer = (!!this.opts.sanitize) ? (this.opts.sanitizer || sanitize) : identity;
+    if (!opts) {
+      opts = buildOptions({ siteSettings: {}});
+    }
+    this.opts = opts;
+  }
+
+  disableSanitizer() {
+    this.opts.sanitizer = this.opts.discourse.sanitizer = ident => ident;
   }

   cook(raw) {
@@ -75,6 +83,6 @@ export default class {
   }

   sanitize(html) {
-    return this.opts.sanitizer(html, new WhiteLister(this.opts));
+    return this.opts.sanitizer(html).trim();
   }
 };
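In JS terms, the calls the Ruby buffer in the next hunk evaluates look roughly like this (a sketch; the empty `siteSettings` input is for brevity and the expected output comes from the updated sanitize-bypass spec):

```javascript
const pt = new PrettyText(buildOptions({ siteSettings: {} }));

// equivalent of the `opts[:sanitize] == false` branch in pretty_text.rb
pt.disableSanitizer();

// with the sanitizer replaced by an identity function, custom markup survives,
// e.g. "<test>alert(42)</test>" is expected to cook to "<p><test>alert(42)</test></p>"
pt.cook("<test>alert(42)</test>");
```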
@@ -175,12 +175,14 @@ module PrettyText

       buffer << "__textOptions = __buildOptions(__optInput);\n"

-      # Be careful disabling sanitization. We allow for custom emails
-      if opts[:sanitize] == false
-        buffer << ('__textOptions.sanitize = false;')
-      end
-
       buffer << ("__pt = new __PrettyText(__textOptions);")

+      # Be careful disabling sanitization. We allow for custom emails
+      if opts[:sanitize] == false
+        buffer << ('__pt.disableSanitizer();')
+      end
+
       opts = context.eval(buffer)

       DiscourseEvent.trigger(:markdown_context, context)
@@ -61,7 +61,7 @@ describe PrettyText do
      [/quote]
    MD
    html = <<~HTML
-      <aside class="quote" data-post="123" data-topic="456">
+      <aside class="quote" data-post="123" data-topic="456" data-full="true">
       <div class="title">
       <div class="quote-controls"></div>
       <img alt width="20" height="20" src="//test.localhost/uploads/default/avatars/42d/57c/46ce7ee487/40.png" class="avatar"> #{user.username}:</div>
@@ -786,6 +786,7 @@ HTML
     expect(PrettyText.cook("<http://a.com>")).not_to include('onebox')
     expect(PrettyText.cook(" http://a.com")).not_to include('onebox')
     expect(PrettyText.cook("a\n http://a.com")).not_to include('onebox')
+    expect(PrettyText.cook("sam@sam.com")).not_to include('onebox')
   end

   it "can handle bbcode" do
@@ -857,7 +858,13 @@ HTML

   it "supports url bbcode" do
     cooked = PrettyText.cook "[url]http://sam.com[/url]"
-    html = '<p><a href="http://sam.com" data-bbcode="true" rel="nofollow noopener">http://sam.com</a></p>'
+    html = '<p><a href="http://sam.com" data-bbcode="true" rel="nofollow noopener">http://sam.com</a></p>';
+    expect(cooked).to eq(html)
+  end
+
+  it "supports nesting tags in url" do
+    cooked = PrettyText.cook("[url=http://sam.com][b]I am sam[/b][/url]")
+    html = '<p><a href="http://sam.com" data-bbcode="true" rel="nofollow noopener"><span class="bbcode-b">I am sam</span></a></p>';
     expect(cooked).to eq(html)
   end
|
@ -875,21 +882,36 @@ HTML
|
||||||
|
|
||||||
it "support special handling for space in urls" do
|
it "support special handling for space in urls" do
|
||||||
cooked = PrettyText.cook "http://testing.com?a%20b"
|
cooked = PrettyText.cook "http://testing.com?a%20b"
|
||||||
html = '<p><a href="http://testing.com?a%20b" class="onebox" rel="nofollow noopener">http://testing.com?a%20b</a></p>'
|
html = '<p><a href="http://testing.com?a%20b" class="onebox" target="_blank" rel="nofollow noopener">http://testing.com?a%20b</a></p>'
|
||||||
expect(cooked).to eq(html)
|
expect(cooked).to eq(html)
|
||||||
end
|
end
|
||||||
|
|
||||||
it "supports onebox for decoded urls" do
|
it "supports onebox for decoded urls" do
|
||||||
cooked = PrettyText.cook "http://testing.com?a%50b"
|
cooked = PrettyText.cook "http://testing.com?a%50b"
|
||||||
html = '<p><a href="http://testing.com?a%50b" class="onebox" rel="nofollow noopener">http://testing.com?aPb</a></p>'
|
html = '<p><a href="http://testing.com?a%50b" class="onebox" target="_blank" rel="nofollow noopener">http://testing.com?aPb</a></p>'
|
||||||
expect(cooked).to eq(html)
|
expect(cooked).to eq(html)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
it "should sanitize the html" do
|
||||||
|
expect(PrettyText.cook("<test>alert(42)</test>")).to eq "<p>alert(42)</p>"
|
||||||
|
end
|
||||||
|
|
||||||
|
it "should not onebox magically linked urls" do
|
||||||
|
expect(PrettyText.cook('[url]site.com[/url]')).not_to include('onebox')
|
||||||
|
end
|
||||||
|
|
||||||
it "should sanitize the html" do
|
it "should sanitize the html" do
|
||||||
|
expect(PrettyText.cook("<p class='hi'>hi</p>")).to eq "<p>hi</p>"
|
||||||
|
end
|
||||||
|
|
||||||
|
it "should strip SCRIPT" do
|
||||||
expect(PrettyText.cook("<script>alert(42)</script>")).to eq ""
|
expect(PrettyText.cook("<script>alert(42)</script>")).to eq ""
|
||||||
end
|
end
|
||||||
|
|
||||||
|
it "should allow sanitize bypass" do
|
||||||
|
expect(PrettyText.cook("<test>alert(42)</test>", sanitize: false)).to eq "<p><test>alert(42)</test></p>"
|
||||||
|
end
|
||||||
|
|
||||||
# custom rule used to specify image dimensions via alt tags
|
# custom rule used to specify image dimensions via alt tags
|
||||||
describe "image dimensions" do
|
describe "image dimensions" do
|
||||||
it "allows title plus dimensions" do
|
it "allows title plus dimensions" do
|
||||||
|
|
|
@@ -5,7 +5,7 @@ import { IMAGE_VERSION as v} from 'pretty-text/emoji';

 QUnit.module("lib:pretty-text");

-const defaultOpts = buildOptions({
+const rawOpts = {
   siteSettings: {
     enable_emoji: true,
     emoji_set: 'emoji_one',
@@ -15,7 +15,9 @@ const defaultOpts = buildOptions({
     censored_pattern: '\\d{3}-\\d{4}|tech\\w*'
   },
   getURL: url => url
-});
+};
+
+const defaultOpts = buildOptions(rawOpts);

 QUnit.assert.cooked = function(input, expected, message) {
   const actual = new PrettyText(defaultOpts).cook(input);
@@ -28,7 +30,8 @@ QUnit.assert.cooked = function(input, expected, message) {
 };

 QUnit.assert.cookedOptions = function(input, opts, expected, message) {
-  const actual = new PrettyText(_.merge({}, defaultOpts, opts)).cook(input);
+  const merged = _.merge({}, rawOpts, opts);
+  const actual = new PrettyText(buildOptions(merged)).cook(input);
   this.pushResult({
     result: actual === expected,
     actual,
@@ -41,9 +44,18 @@ QUnit.assert.cookedPara = function(input, expected, message) {
   QUnit.assert.cooked(input, `<p>${expected}</p>`, message);
 };

+QUnit.skip("Pending Engine fixes and spec fixes", assert => {
+  assert.cooked("Derpy: http://derp.com?_test_=1",
+                '<p>Derpy: <a href="http://derp.com?_test_=1">http://derp.com?_test_=1</a></p>',
+                "works with underscores in urls");
+
+  assert.cooked("**a*_b**", "<p><strong>a*_b</strong></p>", "allows for characters within bold");
+});
+
 QUnit.test("buildOptions", assert => {
-  assert.ok(buildOptions({ siteSettings: { enable_emoji: true } }).features.emoji, 'emoji enabled');
-  assert.ok(!buildOptions({ siteSettings: { enable_emoji: false } }).features.emoji, 'emoji disabled');
+  assert.ok(buildOptions({ siteSettings: { enable_emoji: true } }).discourse.features.emoji, 'emoji enabled');
+  assert.ok(!buildOptions({ siteSettings: { enable_emoji: false } }).discourse.features.emoji, 'emoji disabled');
 });

 QUnit.test("basic cooking", assert => {
@ -66,10 +78,8 @@ QUnit.test("Nested bold and italics", assert => {
|
||||||
|
|
||||||
QUnit.test("Traditional Line Breaks", assert => {
|
QUnit.test("Traditional Line Breaks", assert => {
|
||||||
const input = "1\n2\n3";
|
const input = "1\n2\n3";
|
||||||
assert.cooked(input, "<p>1<br/>2<br/>3</p>", "automatically handles trivial newlines");
|
assert.cooked(input, "<p>1<br>\n2<br>\n3</p>", "automatically handles trivial newlines");
|
||||||
|
assert.cookedOptions(input, { siteSettings: {traditional_markdown_linebreaks: true} }, "<p>1\n2\n3</p>");
|
||||||
const result = new PrettyText({ traditionalMarkdownLinebreaks: true }).cook(input);
|
|
||||||
assert.equal(result, "<p>1\n2\n3</p>");
|
|
||||||
});
|
});
|
||||||
|
|
||||||
QUnit.test("Unbalanced underscores", assert => {
|
QUnit.test("Unbalanced underscores", assert => {
|
||||||
|
@ -78,15 +88,19 @@ QUnit.test("Unbalanced underscores", assert => {
|
||||||
|
|
||||||
QUnit.test("Line Breaks", assert => {
|
QUnit.test("Line Breaks", assert => {
|
||||||
assert.cooked("[] first choice\n[] second choice",
|
assert.cooked("[] first choice\n[] second choice",
|
||||||
"<p>[] first choice<br/>[] second choice</p>",
|
"<p>[] first choice<br>\n[] second choice</p>",
|
||||||
"it handles new lines correctly with [] options");
|
"it handles new lines correctly with [] options");
|
||||||
|
|
||||||
|
// note this is a change from the previous engine but is correct:
// we have an html block and behavior is defined per the CommonMark spec
// (the old engine would wrap trout in a <p>)
|
||||||
assert.cooked("<blockquote>evil</blockquote>\ntrout",
|
assert.cooked("<blockquote>evil</blockquote>\ntrout",
|
||||||
"<blockquote>evil</blockquote>\n\n<p>trout</p>",
|
"<blockquote>evil</blockquote>\ntrout",
|
||||||
"it doesn't insert <br> after blockquotes");
|
"it doesn't insert <br> after blockquotes");
|
||||||
|
|
||||||
assert.cooked("leading<blockquote>evil</blockquote>\ntrout",
|
assert.cooked("leading<blockquote>evil</blockquote>\ntrout",
|
||||||
"leading<blockquote>evil</blockquote>\n\n<p>trout</p>",
|
"<p>leading<blockquote>evil</blockquote><br>\ntrout</p>",
|
||||||
"it doesn't insert <br> after blockquotes with leading text");
|
"it doesn't insert <br> after blockquotes with leading text");
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -111,10 +125,6 @@ QUnit.test("Links", assert => {
|
||||||
'<p>Derpy: <a href="http://derp.com?__test=1">http://derp.com?__test=1</a></p>',
|
'<p>Derpy: <a href="http://derp.com?__test=1">http://derp.com?__test=1</a></p>',
|
||||||
"works with double underscores in urls");
|
"works with double underscores in urls");
|
||||||
|
|
||||||
assert.cooked("Derpy: http://derp.com?_test_=1",
|
|
||||||
'<p>Derpy: <a href="http://derp.com?_test_=1">http://derp.com?_test_=1</a></p>',
|
|
||||||
"works with underscores in urls");
|
|
||||||
|
|
||||||
assert.cooked("Atwood: www.codinghorror.com",
|
assert.cooked("Atwood: www.codinghorror.com",
|
||||||
'<p>Atwood: <a href="http://www.codinghorror.com">www.codinghorror.com</a></p>',
|
'<p>Atwood: <a href="http://www.codinghorror.com">www.codinghorror.com</a></p>',
|
||||||
"autolinks something that begins with www");
|
"autolinks something that begins with www");
|
||||||
|
@ -136,11 +146,11 @@ QUnit.test("Links", assert => {
|
||||||
"autolinks a URL with parentheses (like Wikipedia)");
|
"autolinks a URL with parentheses (like Wikipedia)");
|
||||||
|
|
||||||
assert.cooked("Here's a tweet:\nhttps://twitter.com/evil_trout/status/345954894420787200",
|
assert.cooked("Here's a tweet:\nhttps://twitter.com/evil_trout/status/345954894420787200",
|
||||||
"<p>Here's a tweet:<br/><a href=\"https://twitter.com/evil_trout/status/345954894420787200\" class=\"onebox\" target=\"_blank\">https://twitter.com/evil_trout/status/345954894420787200</a></p>",
|
"<p>Here's a tweet:<br>\n<a href=\"https://twitter.com/evil_trout/status/345954894420787200\" class=\"onebox\" target=\"_blank\">https://twitter.com/evil_trout/status/345954894420787200</a></p>",
|
||||||
"It doesn't strip the new line.");
|
"It doesn't strip the new line.");
|
||||||
|
|
||||||
assert.cooked("1. View @eviltrout's profile here: http://meta.discourse.org/u/eviltrout/activity<br/>next line.",
|
assert.cooked("1. View @eviltrout's profile here: http://meta.discourse.org/u/eviltrout/activity<br/>next line.",
|
||||||
"<ol><li>View <span class=\"mention\">@eviltrout</span>'s profile here: <a href=\"http://meta.discourse.org/u/eviltrout/activity\">http://meta.discourse.org/u/eviltrout/activity</a><br>next line.</li></ol>",
|
"<ol>\n<li>View <span class=\"mention\">@eviltrout</span>'s profile here: <a href=\"http://meta.discourse.org/u/eviltrout/activity\">http://meta.discourse.org/u/eviltrout/activity</a><br>next line.</li>\n</ol>",
|
||||||
"allows autolinking within a list without inserting a paragraph.");
|
"allows autolinking within a list without inserting a paragraph.");
|
||||||
|
|
||||||
assert.cooked("[3]: http://eviltrout.com", "", "It doesn't autolink markdown link references");
|
assert.cooked("[3]: http://eviltrout.com", "", "It doesn't autolink markdown link references");
|
||||||
|
@ -155,8 +165,8 @@ QUnit.test("Links", assert => {
|
||||||
"<a href=\"http://www.imdb.com/name/nm2225369\">http://www.imdb.com/name/nm2225369</a></p>",
|
"<a href=\"http://www.imdb.com/name/nm2225369\">http://www.imdb.com/name/nm2225369</a></p>",
|
||||||
'allows multiple links on one line');
|
'allows multiple links on one line');
|
||||||
|
|
||||||
assert.cooked("* [Evil Trout][1]\n [1]: http://eviltrout.com",
|
assert.cooked("* [Evil Trout][1]\n\n[1]: http://eviltrout.com",
|
||||||
"<ul><li><a href=\"http://eviltrout.com\">Evil Trout</a></li></ul>",
|
"<ul>\n<li><a href=\"http://eviltrout.com\">Evil Trout</a></li>\n</ul>",
|
||||||
"allows markdown link references in a list");
|
"allows markdown link references in a list");
|
||||||
|
|
||||||
assert.cooked("User [MOD]: Hello!",
|
assert.cooked("User [MOD]: Hello!",
|
||||||
|
@ -175,7 +185,7 @@ QUnit.test("Links", assert => {
|
||||||
|
|
||||||
|
|
||||||
assert.cooked("[Link](http://www.example.com) (with an outer \"description\")",
|
assert.cooked("[Link](http://www.example.com) (with an outer \"description\")",
|
||||||
"<p><a href=\"http://www.example.com\">Link</a> (with an outer \"description\")</p>",
|
"<p><a href=\"http://www.example.com\">Link</a> (with an outer "description")</p>",
|
||||||
"it doesn't consume closing parens as part of the url");
|
"it doesn't consume closing parens as part of the url");
|
||||||
|
|
||||||
assert.cooked("A link inside parentheses (http://www.example.com)",
|
assert.cooked("A link inside parentheses (http://www.example.com)",
|
||||||
|
@ -188,50 +198,76 @@ QUnit.test("Links", assert => {
|
||||||
});
|
});
|
||||||
|
|
||||||
QUnit.test("simple quotes", assert => {
|
QUnit.test("simple quotes", assert => {
|
||||||
assert.cooked("> nice!", "<blockquote><p>nice!</p></blockquote>", "it supports simple quotes");
|
assert.cooked("> nice!", "<blockquote>\n<p>nice!</p>\n</blockquote>", "it supports simple quotes");
|
||||||
assert.cooked(" > nice!", "<blockquote><p>nice!</p></blockquote>", "it allows quotes with preceding spaces");
|
assert.cooked(" > nice!", "<blockquote>\n<p>nice!</p>\n</blockquote>", "it allows quotes with preceding spaces");
|
||||||
assert.cooked("> level 1\n> > level 2",
|
assert.cooked("> level 1\n> > level 2",
|
||||||
"<blockquote><p>level 1</p><blockquote><p>level 2</p></blockquote></blockquote>",
|
"<blockquote>\n<p>level 1</p>\n<blockquote>\n<p>level 2</p>\n</blockquote>\n</blockquote>",
|
||||||
"it allows nesting of blockquotes");
|
"it allows nesting of blockquotes");
|
||||||
assert.cooked("> level 1\n> > level 2",
|
assert.cooked("> level 1\n> > level 2",
|
||||||
"<blockquote><p>level 1</p><blockquote><p>level 2</p></blockquote></blockquote>",
|
"<blockquote>\n<p>level 1</p>\n<blockquote>\n<p>level 2</p>\n</blockquote>\n</blockquote>",
|
||||||
"it allows nesting of blockquotes with spaces");
|
"it allows nesting of blockquotes with spaces");
|
||||||
|
|
||||||
assert.cooked("- hello\n\n > world\n > eviltrout",
|
assert.cooked("- hello\n\n > world\n > eviltrout",
|
||||||
"<ul><li>hello</li></ul>\n\n<blockquote><p>world<br/>eviltrout</p></blockquote>",
|
`<ul>
|
||||||
|
<li>
|
||||||
|
<p>hello</p>
|
||||||
|
<blockquote>
|
||||||
|
<p>world<br>
|
||||||
|
eviltrout</p>
|
||||||
|
</blockquote>
|
||||||
|
</li>
|
||||||
|
</ul>`,
|
||||||
"it allows quotes within a list.");
|
"it allows quotes within a list.");
|
||||||
|
|
||||||
assert.cooked("- <p>eviltrout</p>",
|
assert.cooked("- <p>eviltrout</p>",
|
||||||
"<ul><li><p>eviltrout</p></li></ul>",
|
"<ul>\n<li>\n<p>eviltrout</p></li>\n</ul>",
|
||||||
"it allows paragraphs within a list.");
|
"it allows paragraphs within a list.");
|
||||||
|
|
||||||
|
|
||||||
assert.cooked(" > indent 1\n > indent 2", "<blockquote><p>indent 1<br/>indent 2</p></blockquote>", "allow multiple spaces to indent");
|
assert.cooked(" > indent 1\n > indent 2", "<blockquote>\n<p>indent 1<br>\nindent 2</p>\n</blockquote>", "allow multiple spaces to indent");
|
||||||
|
|
||||||
});
|
});
|
||||||
|
|
||||||
QUnit.test("Quotes", assert => {
|
QUnit.test("Quotes", assert => {
|
||||||
|
|
||||||
assert.cookedOptions("[quote=\"eviltrout, post: 1\"]\na quote\n\nsecond line\n\nthird line[/quote]",
|
assert.cookedOptions("[quote=\"eviltrout, post: 1\"]\na quote\n\nsecond line\n\nthird line\n[/quote]",
|
||||||
{ topicId: 2 },
|
{ topicId: 2 },
|
||||||
"<aside class=\"quote\" data-post=\"1\"><div class=\"title\"><div class=\"quote-controls\"></div>eviltrout:</div><blockquote>" +
|
`<aside class=\"quote\" data-post=\"1\">
|
||||||
"<p>a quote</p><p>second line</p><p>third line</p></blockquote></aside>",
|
<div class=\"title\">
|
||||||
|
<div class=\"quote-controls\"></div>
|
||||||
|
eviltrout:</div>
|
||||||
|
<blockquote>
|
||||||
|
<p>a quote</p>
|
||||||
|
<p>second line</p>
|
||||||
|
<p>third line</p>
|
||||||
|
</blockquote>
|
||||||
|
</aside>`,
|
||||||
"works with multiple lines");
|
"works with multiple lines");
|
||||||
|
|
||||||
assert.cookedOptions("1[quote=\"bob, post:1\"]my quote[/quote]2",
|
|
||||||
{ topicId: 2, lookupAvatar: function(name) { return "" + name; }, sanitize: true },
|
|
||||||
"<p>1</p>\n\n<aside class=\"quote\" data-post=\"1\"><div class=\"title\"><div class=\"quote-controls\"></div>bob" +
|
|
||||||
"bob:</div><blockquote><p>my quote</p></blockquote></aside>\n\n<p>2</p>",
|
|
||||||
"handles quotes properly");
|
|
||||||
|
|
||||||
assert.cookedOptions("1[quote=\"bob, post:1\"]my quote[/quote]2",
|
assert.cookedOptions("[quote=\"bob, post:1\"]\nmy quote\n[/quote]",
|
||||||
{ topicId: 2, lookupAvatar: function() { } },
|
{ topicId: 2, lookupAvatar: function() { } },
|
||||||
"<p>1</p>\n\n<aside class=\"quote\" data-post=\"1\"><div class=\"title\"><div class=\"quote-controls\"></div>bob:" +
|
`<aside class=\"quote\" data-post=\"1\">
|
||||||
"</div><blockquote><p>my quote</p></blockquote></aside>\n\n<p>2</p>",
|
<div class=\"title\">
|
||||||
|
<div class=\"quote-controls\"></div>
|
||||||
|
bob:</div>
|
||||||
|
<blockquote>
|
||||||
|
<p>my quote</p>
|
||||||
|
</blockquote>
|
||||||
|
</aside>`,
|
||||||
"includes no avatar if none is found");
|
"includes no avatar if none is found");
|
||||||
|
|
||||||
assert.cooked(`[quote]\na\n\n[quote]\nb\n[/quote]\n[/quote]`,
|
assert.cooked(`[quote]\na\n\n[quote]\nb\n[/quote]\n[/quote]`,
|
||||||
"<p><aside class=\"quote\"><blockquote><p>a</p><p><aside class=\"quote\"><blockquote><p>b</p></blockquote></aside></p></blockquote></aside></p>",
|
`<aside class=\"quote\">
|
||||||
|
<blockquote>
|
||||||
|
<p>a</p>
|
||||||
|
<aside class=\"quote\">
|
||||||
|
<blockquote>
|
||||||
|
<p>b</p>
|
||||||
|
</blockquote>
|
||||||
|
</aside>
|
||||||
|
</blockquote>
|
||||||
|
</aside>`,
|
||||||
"handles nested quotes properly");
|
"handles nested quotes properly");
|
||||||
|
|
||||||
});
|
});
|
||||||
|
@ -261,7 +297,7 @@ QUnit.test("Mentions", assert => {
|
||||||
"won't add mention class to an email address");
|
"won't add mention class to an email address");
|
||||||
|
|
||||||
assert.cooked("hanzo55@yahoo.com",
|
assert.cooked("hanzo55@yahoo.com",
|
||||||
"<p>hanzo55@yahoo.com</p>",
|
"<p><a href=\"mailto:hanzo55@yahoo.com\">hanzo55@yahoo.com</a></p>",
|
||||||
"won't be affected by email addresses that have a number before the @ symbol");
|
"won't be affected by email addresses that have a number before the @ symbol");
|
||||||
|
|
||||||
assert.cooked("@EvilTrout yo",
|
assert.cooked("@EvilTrout yo",
|
||||||
|
@ -269,7 +305,7 @@ QUnit.test("Mentions", assert => {
|
||||||
"it handles mentions at the beginning of a string");
|
"it handles mentions at the beginning of a string");
|
||||||
|
|
||||||
assert.cooked("yo\n@EvilTrout",
|
assert.cooked("yo\n@EvilTrout",
|
||||||
"<p>yo<br/><span class=\"mention\">@EvilTrout</span></p>",
|
"<p>yo<br>\n<span class=\"mention\">@EvilTrout</span></p>",
|
||||||
"it handles mentions at the beginning of a new line");
|
"it handles mentions at the beginning of a new line");
|
||||||
|
|
||||||
assert.cooked("`evil` @EvilTrout `trout`",
|
assert.cooked("`evil` @EvilTrout `trout`",
|
||||||
|
@ -277,15 +313,15 @@ QUnit.test("Mentions", assert => {
|
||||||
"deals correctly with multiple <code> blocks");
|
"deals correctly with multiple <code> blocks");
|
||||||
|
|
||||||
assert.cooked("```\na @test\n```",
|
assert.cooked("```\na @test\n```",
|
||||||
"<p><pre><code class=\"lang-auto\">a @test</code></pre></p>",
|
"<pre><code class=\"lang-auto\">a @test\n</code></pre>",
|
||||||
"should not do mentions within a code block.");
|
"should not do mentions within a code block.");
|
||||||
|
|
||||||
assert.cooked("> foo bar baz @eviltrout",
|
assert.cooked("> foo bar baz @eviltrout",
|
||||||
"<blockquote><p>foo bar baz <span class=\"mention\">@eviltrout</span></p></blockquote>",
|
"<blockquote>\n<p>foo bar baz <span class=\"mention\">@eviltrout</span></p>\n</blockquote>",
|
||||||
"handles mentions in simple quotes");
|
"handles mentions in simple quotes");
|
||||||
|
|
||||||
assert.cooked("> foo bar baz @eviltrout ohmagerd\nlook at this",
|
assert.cooked("> foo bar baz @eviltrout ohmagerd\nlook at this",
|
||||||
"<blockquote><p>foo bar baz <span class=\"mention\">@eviltrout</span> ohmagerd<br/>look at this</p></blockquote>",
|
"<blockquote>\n<p>foo bar baz <span class=\"mention\">@eviltrout</span> ohmagerd<br>\nlook at this</p>\n</blockquote>",
|
||||||
"does mentions properly with trailing text within a simple quote");
|
"does mentions properly with trailing text within a simple quote");
|
||||||
|
|
||||||
assert.cooked("`code` is okay before @mention",
|
assert.cooked("`code` is okay before @mention",
|
||||||
|
@ -309,7 +345,7 @@ QUnit.test("Mentions", assert => {
|
||||||
"you can have a mention in an inline code block following a real mention.");
|
"you can have a mention in an inline code block following a real mention.");
|
||||||
|
|
||||||
assert.cooked("1. this is a list\n\n2. this is an @eviltrout mention\n",
|
assert.cooked("1. this is a list\n\n2. this is an @eviltrout mention\n",
|
||||||
"<ol><li><p>this is a list</p></li><li><p>this is an <span class=\"mention\">@eviltrout</span> mention</p></li></ol>",
|
"<ol>\n<li>\n<p>this is a list</p>\n</li>\n<li>\n<p>this is an <span class=\"mention\">@eviltrout</span> mention</p>\n</li>\n</ol>",
|
||||||
"it mentions properly in a list.");
|
"it mentions properly in a list.");
|
||||||
|
|
||||||
assert.cooked("Hello @foo/@bar",
|
assert.cooked("Hello @foo/@bar",
|
||||||
|
@@ -341,11 +377,11 @@ QUnit.test("Category hashtags", assert => {
 "it does not translate category hashtag within links");

 assert.cooked("```\n# #category-hashtag\n```",
-"<p><pre><code class=\"lang-auto\"># #category-hashtag</code></pre></p>",
+"<pre><code class=\"lang-auto\"># #category-hashtag\n</code></pre>",
 "it does not translate category hashtags to links in code blocks");

 assert.cooked("># #category-hashtag\n",
-"<blockquote><h1><span class=\"hashtag\">#category-hashtag</span></h1></blockquote>",
+"<blockquote>\n<h1><span class=\"hashtag\">#category-hashtag</span></h1>\n</blockquote>",
 "it handles category hashtags in simple quotes");

 assert.cooked("# #category-hashtag",
@@ -356,10 +392,6 @@ QUnit.test("Category hashtags", assert => {
 "<p>don't <code>#category-hashtag</code></p>",
 "it does not mention in an inline code block");

-assert.cooked("test #hashtag1/#hashtag2",
-"<p>test <span class=\"hashtag\">#hashtag1</span>/#hashtag2</p>",
-"it does not convert category hashtag not bounded by spaces");
-
 assert.cooked("<small>#category-hashtag</small>",
 "<p><small><span class=\"hashtag\">#category-hashtag</span></small></p>",
 "it works between HTML tags");
@@ -371,14 +403,12 @@ QUnit.test("Heading", assert => {
 });

 QUnit.test("bold and italics", assert => {
-assert.cooked("a \"**hello**\"", "<p>a \"<strong>hello</strong>\"</p>", "bolds in quotes");
+assert.cooked("a \"**hello**\"", "<p>a &quot;<strong>hello</strong>&quot;</p>", "bolds in quotes");
 assert.cooked("(**hello**)", "<p>(<strong>hello</strong>)</p>", "bolds in parens");
-assert.cooked("**hello**\nworld", "<p><strong>hello</strong><br>world</p>", "allows newline after bold");
-assert.cooked("**hello**\n**world**", "<p><strong>hello</strong><br><strong>world</strong></p>", "newline between two bolds");
-assert.cooked("**a*_b**", "<p><strong>a*_b</strong></p>", "allows for characters within bold");
+assert.cooked("**hello**\nworld", "<p><strong>hello</strong><br>\nworld</p>", "allows newline after bold");
+assert.cooked("**hello**\n**world**", "<p><strong>hello</strong><br>\n<strong>world</strong></p>", "newline between two bolds");
 assert.cooked("** hello**", "<p>** hello**</p>", "does not bold on a space boundary");
 assert.cooked("**hello **", "<p>**hello **</p>", "does not bold on a space boundary");
-assert.cooked("你**hello**", "<p>你**hello**</p>", "does not bold chinese intra word");
 assert.cooked("**你hello**", "<p><strong>你hello</strong></p>", "allows bolded chinese");
 });

@@ -388,10 +418,11 @@ QUnit.test("Escaping", assert => {
 });

 QUnit.test("New Lines", assert => {
-// Note: This behavior was discussed and we determined it does not make sense to do this
-// unless you're using traditional line breaks
-assert.cooked("_abc\ndef_", "<p>_abc<br>def_</p>", "it does not allow markup to span new lines");
-assert.cooked("_abc\n\ndef_", "<p>_abc</p>\n\n<p>def_</p>", "it does not allow markup to span new paragraphs");
+// historically we would not continue inline em or b across lines,
+// however commonmark gives us no switch to do so and we would be very non compliant.
+// turning softbreaks into a newline is just a renderer option, not a parser switch.
+assert.cooked("_abc\ndef_", "<p><em>abc<br>\ndef</em></p>", "it does allow inlines to span new lines");
+assert.cooked("_abc\n\ndef_", "<p>_abc</p>\n<p>def_</p>", "it does not allow inlines to span new paragraphs");
 });

 QUnit.test("Oneboxing", assert => {
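The rewritten New Lines expectations above follow stock markdown-it behaviour: turning a soft line break into <br> is a renderer option (`breaks`), not a parser switch, and inline markup is allowed to span soft breaks. A rough sketch of that behaviour with plain markdown-it (illustrative only; it omits Discourse's own extensions such as mentions):

// minimal sketch, plain markdown-it without Discourse extensions
const md = require('markdown-it')({ breaks: true }); // render soft breaks as <br>

md.render("_abc\ndef_");   // => "<p><em>abc<br>\ndef</em></p>\n"  (emphasis spans the soft break)
md.render("_abc\n\ndef_"); // => "<p>_abc</p>\n<p>def_</p>\n"      (but never a new paragraph)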
@@ -408,9 +439,9 @@ QUnit.test("Oneboxing", assert => {
 assert.ok(!matches("http://test.com bob", /onebox/), "doesn't onebox links that have trailing text");

 assert.ok(!matches("[Tom Cruise](http://www.tomcruise.com/)", "onebox"), "Markdown links with labels are not oneboxed");
-assert.ok(matches("[http://www.tomcruise.com/](http://www.tomcruise.com/)",
+assert.ok(!matches("[http://www.tomcruise.com/](http://www.tomcruise.com/)",
 "onebox"),
-"Markdown links where the label is the same as the url are oneboxed");
+"Markdown links where the label is the same as the url but link is explicit");

 assert.cooked("http://en.wikipedia.org/wiki/Homicide:_Life_on_the_Street",
 "<p><a href=\"http://en.wikipedia.org/wiki/Homicide:_Life_on_the_Street\" class=\"onebox\"" +
@@ -428,63 +459,63 @@ QUnit.test("links with full urls", assert => {
 QUnit.test("Code Blocks", assert => {

 assert.cooked("<pre>\nhello\n</pre>\n",
-"<p><pre>hello</pre></p>",
+"<pre>\nhello\n</pre>",
 "pre blocks don't include extra lines");

 assert.cooked("```\na\nb\nc\n\nd\n```",
-"<p><pre><code class=\"lang-auto\">a\nb\nc\n\nd</code></pre></p>",
+"<pre><code class=\"lang-auto\">a\nb\nc\n\nd\n</code></pre>",
 "it treats new lines properly");

 assert.cooked("```\ntest\n```",
-"<p><pre><code class=\"lang-auto\">test</code></pre></p>",
+"<pre><code class=\"lang-auto\">test\n</code></pre>",
 "it supports basic code blocks");

 assert.cooked("```json\n{hello: 'world'}\n```\ntrailing",
-"<p><pre><code class=\"lang-json\">{hello: 'world'}</code></pre></p>\n\n<p>trailing</p>",
+"<pre><code class=\"lang-json\">{hello: 'world'}\n</code></pre>\n<p>trailing</p>",
 "It does not truncate text after a code block.");

 assert.cooked("```json\nline 1\n\nline 2\n\n\nline3\n```",
-"<p><pre><code class=\"lang-json\">line 1\n\nline 2\n\n\nline3</code></pre></p>",
+"<pre><code class=\"lang-json\">line 1\n\nline 2\n\n\nline3\n</code></pre>",
 "it maintains new lines inside a code block.");

 assert.cooked("hello\nworld\n```json\nline 1\n\nline 2\n\n\nline3\n```",
-"<p>hello<br/>world<br/></p>\n\n<p><pre><code class=\"lang-json\">line 1\n\nline 2\n\n\nline3</code></pre></p>",
+"<p>hello<br>\nworld</p>\n<pre><code class=\"lang-json\">line 1\n\nline 2\n\n\nline3\n</code></pre>",
 "it maintains new lines inside a code block with leading content.");

 assert.cooked("```ruby\n<header>hello</header>\n```",
-"<p><pre><code class=\"lang-ruby\"><header>hello</header></code></pre></p>",
+"<pre><code class=\"lang-ruby\"><header>hello</header>\n</code></pre>",
 "it escapes code in the code block");

 assert.cooked("```text\ntext\n```",
-"<p><pre><code class=\"lang-nohighlight\">text</code></pre></p>",
+"<pre><code class=\"lang-nohighlight\">text\n</code></pre>",
 "handles text by adding nohighlight");

 assert.cooked("```ruby\n# cool\n```",
-"<p><pre><code class=\"lang-ruby\"># cool</code></pre></p>",
+"<pre><code class=\"lang-ruby\"># cool\n</code></pre>",
 "it supports changing the language");

 assert.cooked("    ```\n    hello\n    ```",
 "<pre><code>```\nhello\n```</code></pre>",
 "only detect ``` at the beginning of lines");

 assert.cooked("```ruby\ndef self.parse(text)\n\n  text\nend\n```",
-"<p><pre><code class=\"lang-ruby\">def self.parse(text)\n\n  text\nend</code></pre></p>",
+"<pre><code class=\"lang-ruby\">def self.parse(text)\n\n  text\nend\n</code></pre>",
 "it allows leading spaces on lines in a code block.");

 assert.cooked("```ruby\nhello `eviltrout`\n```",
-"<p><pre><code class=\"lang-ruby\">hello `eviltrout`</code></pre></p>",
+"<pre><code class=\"lang-ruby\">hello `eviltrout`\n</code></pre>",
 "it allows code with backticks in it");

 assert.cooked("```eviltrout\nhello\n```",
-"<p><pre><code class=\"lang-auto\">hello</code></pre></p>",
+"<pre><code class=\"lang-auto\">hello\n</code></pre>",
 "it doesn't not whitelist all classes");

 assert.cooked("```\n[quote=\"sam, post:1, topic:9441, full:true\"]This is `<not>` a bug.[/quote]\n```",
-"<p><pre><code class=\"lang-auto\">[quote=&quot;sam, post:1, topic:9441, full:true&quot;]This is `<not>` a bug.[/quote]</code></pre></p>",
+"<pre><code class=\"lang-auto\">[quote=&quot;sam, post:1, topic:9441, full:true&quot;]This is `<not>` a bug.[/quote]\n</code></pre>",
 "it allows code with backticks in it");

 assert.cooked("    hello\n<blockquote>test</blockquote>",
-"<pre><code>hello</code></pre>\n\n<blockquote>test</blockquote>",
+"<pre><code>hello\n</code></pre>\n<blockquote>test</blockquote>",
 "it allows an indented code block to by followed by a `<blockquote>`");

 assert.cooked("``` foo bar ```",
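The Code Blocks expectations above all shift the same way: fenced blocks are no longer wrapped in a <p> and the trailing newline stays inside the <code> element, which is the normal commonmark fence rendering. A rough sketch with plain markdown-it (illustrative only; the lang-auto/lang-nohighlight classes come from Discourse, markdown-it's default language prefix is "language-"):

// minimal sketch, plain markdown-it
const md = require('markdown-it')();

md.render("```\ntest\n```");
// => "<pre><code>test\n</code></pre>\n"   (no <p> wrapper, trailing \n kept inside <code>)

md.render("```ruby\n# cool\n```");
// => '<pre><code class="language-ruby"># cool\n</code></pre>\n'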
@@ -492,7 +523,7 @@ QUnit.test("Code Blocks", assert => {
 "it tolerates misuse of code block tags as inline code");

 assert.cooked("```\nline1\n```\n```\nline2\n\nline3\n```",
-"<p><pre><code class=\"lang-auto\">line1</code></pre></p>\n\n<p><pre><code class=\"lang-auto\">line2\n\nline3</code></pre></p>",
+"<pre><code class=\"lang-auto\">line1\n</code></pre>\n<pre><code class=\"lang-auto\">line2\n\nline3\n</code></pre>",
 "it does not consume next block's trailing newlines");

 assert.cooked("    <pre>test</pre>",
@@ -504,22 +535,22 @@ QUnit.test("Code Blocks", assert => {
 "it does not parse other block types in markdown code blocks");

 assert.cooked("## a\nb\n```\nc\n```",
-"<h2>a</h2>\n\n<p><pre><code class=\"lang-auto\">c</code></pre></p>",
+"<h2>a</h2>\n<p>b</p>\n<pre><code class=\"lang-auto\">c\n</code></pre>",
 "it handles headings with code blocks after them.");
 });

 QUnit.test("URLs in BBCode tags", assert => {

 assert.cooked("[img]http://eviltrout.com/eviltrout.png[/img][img]http://samsaffron.com/samsaffron.png[/img]",
-"<p><img src=\"http://eviltrout.com/eviltrout.png\"/><img src=\"http://samsaffron.com/samsaffron.png\"/></p>",
+"<p><img src=\"http://eviltrout.com/eviltrout.png\" alt/><img src=\"http://samsaffron.com/samsaffron.png\" alt/></p>",
 "images are properly parsed");

 assert.cooked("[url]http://discourse.org[/url]",
-"<p><a href=\"http://discourse.org\">http://discourse.org</a></p>",
+"<p><a href=\"http://discourse.org\" data-bbcode=\"true\">http://discourse.org</a></p>",
 "links are properly parsed");

 assert.cooked("[url=http://discourse.org]discourse[/url]",
-"<p><a href=\"http://discourse.org\">discourse</a></p>",
+"<p><a href=\"http://discourse.org\" data-bbcode=\"true\">discourse</a></p>",
 "named links are properly parsed");

 });
@@ -530,39 +561,39 @@ QUnit.test("images", assert => {
 "It allows images with links around them");

 assert.cooked("<img src=\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==\" alt=\"Red dot\">",
-"<p><img src=\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==\" alt=\"Red dot\"></p>",
+"<p>\n<img src=\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==\" alt=\"Red dot\"></p>",
 "It allows data images");
 });

 QUnit.test("censoring", assert => {
 assert.cooked("aw shucks, golly gee whiz.",
 "<p>aw ■■■■■■, golly gee ■■■■.</p>",
 "it censors words in the Site Settings");

 assert.cooked("you are a whizzard! I love cheesewhiz. Whiz.",
 "<p>you are a whizzard! I love cheesewhiz. ■■■■.</p>",
 "it doesn't censor words unless they have boundaries.");

 assert.cooked("you are a whizzer! I love cheesewhiz. Whiz.",
 "<p>you are a ■■■■■■■! I love cheesewhiz. ■■■■.</p>",
 "it censors words even if previous partial matches exist.");

 assert.cooked("The link still works. [whiz](http://www.whiz.com)",
 "<p>The link still works. <a href=\"http://www.whiz.com\">■■■■</a></p>",
 "it won't break links by censoring them.");

 assert.cooked("Call techapj the computer whiz at 555-555-1234 for free help.",
 "<p>Call ■■■■■■■ the computer ■■■■ at 555-■■■■■■■■ for free help.</p>",
 "uses both censored words and patterns from site settings");

 assert.cooked("I have a pen, I have an a**le",
 "<p>I have a pen, I have an ■■■■■</p>",
 "it escapes regexp chars");
 });

 QUnit.test("code blocks/spans hoisting", assert => {
 assert.cooked("```\n\n    some code\n```",
-"<p><pre><code class=\"lang-auto\">    some code</code></pre></p>",
+"<pre><code class=\"lang-auto\">\n    some code\n</code></pre>",
 "it works when nesting standard markdown code blocks within a fenced code block");

 assert.cooked("`$&`",
@@ -575,47 +606,42 @@ QUnit.test('basic bbcode', assert => {
 assert.cookedPara("[i]emphasis[/i]", "<span class=\"bbcode-i\">emphasis</span>", "italics text");
 assert.cookedPara("[u]underlined[/u]", "<span class=\"bbcode-u\">underlined</span>", "underlines text");
 assert.cookedPara("[s]strikethrough[/s]", "<span class=\"bbcode-s\">strikethrough</span>", "strikes-through text");
-assert.cookedPara("[img]http://eviltrout.com/eviltrout.png[/img]", "<img src=\"http://eviltrout.com/eviltrout.png\">", "links images");
-assert.cookedPara("[email]eviltrout@mailinator.com[/email]", "<a href=\"mailto:eviltrout@mailinator.com\">eviltrout@mailinator.com</a>", "supports [email] without a title");
+assert.cookedPara("[img]http://eviltrout.com/eviltrout.png[/img]", "<img src=\"http://eviltrout.com/eviltrout.png\" alt>", "links images");
+assert.cookedPara("[email]eviltrout@mailinator.com[/email]", "<a href=\"mailto:eviltrout@mailinator.com\" data-bbcode=\"true\">eviltrout@mailinator.com</a>", "supports [email] without a title");
 assert.cookedPara("[b]evil [i]trout[/i][/b]",
 "<span class=\"bbcode-b\">evil <span class=\"bbcode-i\">trout</span></span>",
 "allows embedding of tags");
-assert.cookedPara("[EMAIL]eviltrout@mailinator.com[/EMAIL]", "<a href=\"mailto:eviltrout@mailinator.com\">eviltrout@mailinator.com</a>", "supports upper case bbcode");
+assert.cookedPara("[EMAIL]eviltrout@mailinator.com[/EMAIL]", "<a href=\"mailto:eviltrout@mailinator.com\" data-bbcode=\"true\">eviltrout@mailinator.com</a>", "supports upper case bbcode");
 assert.cookedPara("[b]strong [b]stronger[/b][/b]", "<span class=\"bbcode-b\">strong <span class=\"bbcode-b\">stronger</span></span>", "accepts nested bbcode tags");
 });

 QUnit.test('urls', assert => {
 assert.cookedPara("[url]not a url[/url]", "not a url", "supports [url] that isn't a url");
-assert.cookedPara("[url]abc.com[/url]", "abc.com", "no error when a url has no protocol and begins with a");
-assert.cookedPara("[url]http://bettercallsaul.com[/url]", "<a href=\"http://bettercallsaul.com\">http://bettercallsaul.com</a>", "supports [url] without parameter");
-assert.cookedPara("[url=http://example.com]example[/url]", "<a href=\"http://example.com\">example</a>", "supports [url] with given href");
+assert.cookedPara("[url]abc.com[/url]", "<a href=\"http://abc.com\">abc.com</a>", "it magically links using linkify");
+assert.cookedPara("[url]http://bettercallsaul.com[/url]", "<a href=\"http://bettercallsaul.com\" data-bbcode=\"true\">http://bettercallsaul.com</a>", "supports [url] without parameter");
+assert.cookedPara("[url=http://example.com]example[/url]", "<a href=\"http://example.com\" data-bbcode=\"true\">example</a>", "supports [url] with given href");
 assert.cookedPara("[url=http://www.example.com][img]http://example.com/logo.png[/img][/url]",
-"<a href=\"http://www.example.com\"><img src=\"http://example.com/logo.png\"></a>",
+"<a href=\"http://www.example.com\" data-bbcode=\"true\"><img src=\"http://example.com/logo.png\" alt></a>",
 "supports [url] with an embedded [img]");
 });
 QUnit.test('invalid bbcode', assert => {
-const result = new PrettyText({ lookupAvatar: false }).cook("[code]I am not closed\n\nThis text exists.");
-assert.equal(result, "<p>[code]I am not closed</p>\n\n<p>This text exists.</p>", "does not raise an error with an open bbcode tag.");
+assert.cooked("[code]I am not closed\n\nThis text exists.",
+"<p>[code]I am not closed</p>\n<p>This text exists.</p>",
+"does not raise an error with an open bbcode tag.");
 });

 QUnit.test('code', assert => {
-assert.cookedPara("[code]\nx++\n[/code]", "<pre><code class=\"lang-auto\">x++</code></pre>", "makes code into pre");
-assert.cookedPara("[code]\nx++\ny++\nz++\n[/code]", "<pre><code class=\"lang-auto\">x++\ny++\nz++</code></pre>", "makes code into pre");
-assert.cookedPara("[code]abc\n#def\n[/code]", '<pre><code class=\"lang-auto\">abc\n#def</code></pre>', 'it handles headings in a [code] block');
-assert.cookedPara("[code]\n    s[/code]",
+assert.cooked("[code]\nx++\n[/code]", "<pre><code class=\"lang-auto\">x++</code></pre>", "makes code into pre");
+assert.cooked("[code]\nx++\ny++\nz++\n[/code]", "<pre><code class=\"lang-auto\">x++\ny++\nz++</code></pre>", "makes code into pre");
+assert.cooked("[code]\nabc\n#def\n[/code]", '<pre><code class=\"lang-auto\">abc\n#def</code></pre>', 'it handles headings in a [code] block');
+assert.cooked("[code]\n    s\n[/code]",
 "<pre><code class=\"lang-auto\">    s</code></pre>",
 "it doesn't trim leading whitespace");
 });

-QUnit.test('lists', assert => {
-assert.cookedPara("[ul][li]option one[/li][/ul]", "<ul><li>option one</li></ul>", "creates an ul");
-assert.cookedPara("[ol][li]option one[/li][/ol]", "<ol><li>option one</li></ol>", "creates an ol");
-assert.cookedPara("[ul]\n[li]option one[/li]\n[li]option two[/li]\n[/ul]", "<ul><li>option one</li><li>option two</li></ul>", "suppresses empty lines in lists");
-});
-
 QUnit.test('tags with arguments', assert => {
-assert.cookedPara("[url=http://bettercallsaul.com]better call![/url]", "<a href=\"http://bettercallsaul.com\">better call!</a>", "supports [url] with a title");
-assert.cookedPara("[email=eviltrout@mailinator.com]evil trout[/email]", "<a href=\"mailto:eviltrout@mailinator.com\">evil trout</a>", "supports [email] with a title");
+assert.cookedPara("[url=http://bettercallsaul.com]better call![/url]", "<a href=\"http://bettercallsaul.com\" data-bbcode=\"true\">better call!</a>", "supports [url] with a title");
+assert.cookedPara("[email=eviltrout@mailinator.com]evil trout[/email]", "<a href=\"mailto:eviltrout@mailinator.com\" data-bbcode=\"true\">evil trout</a>", "supports [email] with a title");
 assert.cookedPara("[u][i]abc[/i][/u]", "<span class=\"bbcode-u\"><span class=\"bbcode-i\">abc</span></span>", "can nest tags");
 assert.cookedPara("[b]first[/b] [b]second[/b]", "<span class=\"bbcode-b\">first</span> <span class=\"bbcode-b\">second</span>", "can bold two things on the same line");
 });
@@ -655,70 +681,140 @@ QUnit.test("quotes", assert => {
 "[quote=\"eviltrout, post:1, topic:2\"]\nthis is <not> a bug\n[/quote]\n\n",
 "it escapes the contents of the quote");

-assert.cookedPara("[quote]test[/quote]",
-"<aside class=\"quote\"><blockquote><p>test</p></blockquote></aside>",
+assert.cooked("[quote]\ntest\n[/quote]",
+"<aside class=\"quote\">\n<blockquote>\n<p>test</p>\n</blockquote>\n</aside>",
 "it supports quotes without params");

-assert.cookedPara("[quote]\n*test*\n[/quote]",
-"<aside class=\"quote\"><blockquote><p><em>test</em></p></blockquote></aside>",
+assert.cooked("[quote]\n*test*\n[/quote]",
+"<aside class=\"quote\">\n<blockquote>\n<p><em>test</em></p>\n</blockquote>\n</aside>",
 "it doesn't insert a new line for italics");

-assert.cookedPara("[quote=,script='a'><script>alert('test');//':a][/quote]",
-"<aside class=\"quote\"><blockquote></blockquote></aside>",
+assert.cooked("[quote=,script='a'><script>alert('test');//':a]\n[/quote]",
+"<aside class=\"quote\">\n<blockquote></blockquote>\n</aside>",
 "It will not create a script tag within an attribute");
 });

 QUnit.test("quote formatting", assert => {

-assert.cooked("[quote=\"EvilTrout, post:123, topic:456, full:true\"][sam][/quote]",
-"<aside class=\"quote\" data-post=\"123\" data-topic=\"456\" data-full=\"true\"><div class=\"title\">" +
-"<div class=\"quote-controls\"></div>EvilTrout:</div><blockquote><p>[sam]</p></blockquote></aside>",
+assert.cooked("[quote=\"EvilTrout, post:123, topic:456, full:true\"]\n[sam]\n[/quote]",
+`<aside class=\"quote\" data-post=\"123\" data-topic=\"456\" data-full=\"true\">
+<div class=\"title\">
+<div class=\"quote-controls\"></div>
+EvilTrout:</div>
+<blockquote>
+<p>[sam]</p>
+</blockquote>
+</aside>`,
 "it allows quotes with [] inside");

-assert.cooked("[quote=\"eviltrout, post:1, topic:1\"]abc[/quote]",
-"<aside class=\"quote\" data-post=\"1\" data-topic=\"1\"><div class=\"title\"><div class=\"quote-controls\"></div>eviltrout:" +
-"</div><blockquote><p>abc</p></blockquote></aside>",
+assert.cooked("[quote=\"eviltrout, post:1, topic:1\"]\nabc\n[/quote]",
+`<aside class=\"quote\" data-post=\"1\" data-topic=\"1\">
+<div class=\"title\">
+<div class=\"quote-controls\"></div>
+eviltrout:</div>
+<blockquote>
+<p>abc</p>
+</blockquote>
+</aside>`,
 "renders quotes properly");

-assert.cooked("[quote=\"eviltrout, post:1, topic:1\"]abc[/quote]\nhello",
-"<aside class=\"quote\" data-post=\"1\" data-topic=\"1\"><div class=\"title\"><div class=\"quote-controls\"></div>eviltrout:" +
-"</div><blockquote><p>abc</p></blockquote></aside>\n\n<p>hello</p>",
+assert.cooked("[quote=\"eviltrout, post:1, topic:1\"]\nabc\n[/quote]\nhello",
+`<aside class=\"quote\" data-post=\"1\" data-topic=\"1\">
+<div class=\"title\">
+<div class=\"quote-controls\"></div>
+eviltrout:</div>
+<blockquote>
+<p>abc</p>
+</blockquote>
+</aside>
+<p>hello</p>`,
 "handles new lines properly");

 assert.cooked("[quote=\"Alice, post:1, topic:1\"]\n[quote=\"Bob, post:2, topic:1\"]\n[/quote]\n[/quote]",
-"<aside class=\"quote\" data-post=\"1\" data-topic=\"1\"><div class=\"title\"><div class=\"quote-controls\"></div>Alice:" +
-"</div><blockquote><aside class=\"quote\" data-post=\"2\" data-topic=\"1\"><div class=\"title\"><div class=\"quote-controls\"></div>Bob:" +
-"</div><blockquote></blockquote></aside></blockquote></aside>",
+`<aside class=\"quote\" data-post=\"1\" data-topic=\"1\">
+<div class=\"title\">
+<div class=\"quote-controls\"></div>
+Alice:</div>
+<blockquote>
+<aside class=\"quote\" data-post=\"2\" data-topic=\"1\">
+<div class=\"title\">
+<div class=\"quote-controls\"></div>
+Bob:</div>
+<blockquote></blockquote>
+</aside>
+</blockquote>
+</aside>`,
 "quotes can be nested");

 assert.cooked("[quote=\"Alice, post:1, topic:1\"]\n[quote=\"Bob, post:2, topic:1\"]\n[/quote]",
-"<aside class=\"quote\" data-post=\"1\" data-topic=\"1\"><div class=\"title\"><div class=\"quote-controls\"></div>Alice:" +
-"</div><blockquote><p>[quote=\"Bob, post:2, topic:1\"]</p></blockquote></aside>",
-"handles mismatched nested quote tags");
+`<p>[quote="Alice, post:1, topic:1"]</p>
+<aside class=\"quote\" data-post=\"2\" data-topic=\"1\">
+<div class=\"title\">
+<div class=\"quote-controls\"></div>
+Bob:</div>
+<blockquote></blockquote>
+</aside>`,
+"handles mismatched nested quote tags (non greedy)");

 assert.cooked("[quote=\"Alice, post:1, topic:1\"]\n```javascript\nvar foo ='foo';\nvar bar = 'bar';\n```\n[/quote]",
-"<aside class=\"quote\" data-post=\"1\" data-topic=\"1\"><div class=\"title\"><div class=\"quote-controls\"></div>Alice:</div><blockquote><p><pre><code class=\"lang-javascript\">var foo ='foo';\nvar bar = 'bar';</code></pre></p></blockquote></aside>",
+`<aside class=\"quote\" data-post=\"1\" data-topic=\"1\">
+<div class=\"title\">
+<div class=\"quote-controls\"></div>
+Alice:</div>
+<blockquote>
+<pre><code class=\"lang-javascript\">var foo ='foo';
+var bar = 'bar';
+</code></pre>
+</blockquote>
+</aside>`,
 "quotes can have code blocks without leading newline");

 assert.cooked("[quote=\"Alice, post:1, topic:1\"]\n\n```javascript\nvar foo ='foo';\nvar bar = 'bar';\n```\n[/quote]",
-"<aside class=\"quote\" data-post=\"1\" data-topic=\"1\"><div class=\"title\"><div class=\"quote-controls\"></div>Alice:</div><blockquote><p><pre><code class=\"lang-javascript\">var foo ='foo';\nvar bar = 'bar';</code></pre></p></blockquote></aside>",
+`<aside class=\"quote\" data-post=\"1\" data-topic=\"1\">
+<div class=\"title\">
+<div class=\"quote-controls\"></div>
+Alice:</div>
+<blockquote>
+<pre><code class=\"lang-javascript\">var foo ='foo';
+var bar = 'bar';
+</code></pre>
+</blockquote>
+</aside>`,
 "quotes can have code blocks with leading newline");
 });

 QUnit.test("quotes with trailing formatting", assert => {
 const result = new PrettyText(defaultOpts).cook("[quote=\"EvilTrout, post:123, topic:456, full:true\"]\nhello\n[/quote]\n*Test*");
 assert.equal(result,
-"<aside class=\"quote\" data-post=\"123\" data-topic=\"456\" data-full=\"true\"><div class=\"title\">" +
-"<div class=\"quote-controls\"></div>EvilTrout:</div><blockquote><p>hello</p></blockquote></aside>\n\n<p><em>Test</em></p>",
+`<aside class=\"quote\" data-post=\"123\" data-topic=\"456\" data-full=\"true\">
+<div class=\"title\">
+<div class=\"quote-controls\"></div>
+EvilTrout:</div>
+<blockquote>
+<p>hello</p>
+</blockquote>
+</aside>
+<p><em>Test</em></p>`,
 "it allows trailing formatting");
 });

 QUnit.test("enable/disable features", assert => {
-const table = `<table><tr><th>hello</th></tr><tr><td>world</td></tr></table>`;
-const hasTable = new PrettyText({ features: {table: true}, sanitize: true}).cook(table);
-assert.equal(hasTable, `<table class="md-table"><tr><th>hello</th></tr><tr><td>world</td></tr></table>`);

-const noTable = new PrettyText({ features: { table: false }, sanitize: true}).cook(table);
-assert.equal(noTable, `<p></p>`, 'tables are stripped when disabled');
+assert.cookedOptions('|a|\n--\n|a|', { features: {table: false} }, '');
+assert.cooked('|a|\n--\n|a|',
+`<table>
+<thead>
+<tr>
+<th>a</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td>a</td>
+</tr>
+</tbody>
+</table>`);
 });

 QUnit.test("emoji", assert => {
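The enable/disable spec above now cooks a plain pipe table through the engine's built-in GFM table support instead of a bespoke table dialect; with the feature off the output is empty, with it on the full <table>/<thead>/<tbody> markup shown above is produced. A rough sketch of the underlying behaviour with plain markdown-it (illustrative only; exact whitespace may differ, and the md-table class handling is Discourse's, not markdown-it's):

// minimal sketch, plain markdown-it (its table rule is enabled by default)
const md = require('markdown-it')();
md.render('|a|\n--\n|a|');
// => roughly "<table>\n<thead>\n<tr>\n<th>a</th>\n</tr>\n</thead>\n<tbody>\n<tr>\n<td>a</td>\n</tr>\n</tbody>\n</table>\n"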
@@ -729,6 +825,6 @@ QUnit.test("emoji", assert => {

 QUnit.test("emoji - emojiSet", assert => {
 assert.cookedOptions(":smile:",
-{ emojiSet: 'twitter' },
+{ siteSettings : { emoji_set: 'twitter' }},
 `<p><img src="/images/emoji/twitter/smile.png?v=${v}" title=":smile:" class="emoji" alt=":smile:"></p>`);
 });
@@ -12,15 +12,15 @@ QUnit.test("sanitize", assert => {
 assert.equal(pt.sanitize("<div><p class=\"funky\" wrong='1'>hello</p></div>"), "<div><p>hello</p></div>");
 assert.equal(pt.sanitize("<3 <3"), "<3 <3");
 assert.equal(pt.sanitize("<_<"), "<_<");

 cooked("hello<script>alert(42)</script>", "<p>hello</p>", "it sanitizes while cooking");

 cooked("<a href='http://disneyland.disney.go.com/'>disney</a> <a href='http://reddit.com'>reddit</a>",
 "<p><a href=\"http://disneyland.disney.go.com/\">disney</a> <a href=\"http://reddit.com\">reddit</a></p>",
 "we can embed proper links");

-cooked("<center>hello</center>", "<p>hello</p>", "it does not allow centering");
-cooked("<table><tr><td>hello</td></tr></table>\nafter", "<p>after</p>", "it does not allow tables");
-cooked("<blockquote>a\n</blockquote>\n", "<blockquote>a\n\n<br/>\n\n</blockquote>", "it does not double sanitize");
+cooked("<center>hello</center>", "hello", "it does not allow centering");
+cooked("<blockquote>a\n</blockquote>\n", "<blockquote>a\n</blockquote>", "it does not double sanitize");

 cooked("<iframe src=\"http://discourse.org\" width=\"100\" height=\"42\"></iframe>", "", "it does not allow most iframes");
@@ -38,9 +38,9 @@ QUnit.test("sanitize", assert => {
 assert.equal(pt.sanitize("<progress>hello"), "hello");
 assert.equal(pt.sanitize("<mark>highlight</mark>"), "highlight");

-cooked("[the answer](javascript:alert(42))", "<p><a>the answer</a></p>", "it prevents XSS");
+cooked("[the answer](javascript:alert(42))", "<p>[the answer](javascript:alert(42))</p>", "it prevents XSS");

-cooked("<i class=\"fa fa-bug fa-spin\" style=\"font-size:600%\"></i>\n<!-- -->", "<p><i></i><br/></p>", "it doesn't circumvent XSS with comments");
+cooked("<i class=\"fa fa-bug fa-spin\" style=\"font-size:600%\"></i>\n<!-- -->", "<p><i></i></p>", "it doesn't circumvent XSS with comments");

 cooked("<span class=\"-bbcode-s fa fa-spin\">a</span>", "<p><span>a</span></p>", "it sanitizes spans");
 cooked("<span class=\"fa fa-spin -bbcode-s\">a</span>", "<p><span>a</span></p>", "it sanitizes spans");
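The XSS expectation above matches markdown-it's stock link validation: a javascript: URL fails validateLink, so the bracketed text is left uncooked instead of producing an empty anchor. A rough sketch of that default behaviour (plain markdown-it; Discourse's sanitizer and whitelister sit on top of it):

// minimal sketch, plain markdown-it
const md = require('markdown-it')();

md.render("[the answer](javascript:alert(42))");
// => "<p>[the answer](javascript:alert(42))</p>\n"  -- the link never parses

md.validateLink("javascript:alert(42)"); // => false
md.validateLink("http://discourse.org"); // => true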