FIX: don't create tag autolinks inside links (new engine)

This commit is contained in:
Sam 2017-07-07 13:03:36 -04:00
parent c47658c7c5
commit 6f09df0deb
3 changed files with 111 additions and 43 deletions

View File

@ -1,6 +1,6 @@
import { inlineRegexRule } from 'pretty-text/engines/markdown-it/helpers'; import { textReplace } from 'pretty-text/engines/markdown-it/helpers';
function emitter(matches, state) { function addHashtag(buffer, matches, state) {
const options = state.md.options.discourse; const options = state.md.options.discourse;
const [hashtag, slug] = matches; const [hashtag, slug] = matches;
const categoryHashtagLookup = options.categoryHashtagLookup; const categoryHashtagLookup = options.categoryHashtagLookup;
@ -9,34 +9,89 @@ function emitter(matches, state) {
let token; let token;
if (result) { if (result) {
token = state.push('link_open', 'a', 1); token = new state.Token('link_open', 'a', 1);
token.attrs = [['class', 'hashtag'], ['href', result[0]]]; token.attrs = [['class', 'hashtag'], ['href', result[0]]];
token.block = false; token.block = false;
buffer.push(token);
token = state.push('text', '', 0); token = new state.Token('text', '', 0);
token.content = '#'; token.content = '#';
buffer.push(token);
token = state.push('span_open', 'span', 1); token = new state.Token('span_open', 'span', 1);
token.block = false; token.block = false;
buffer.push(token);
token = state.push('text', '', 0); token = new state.Token('text', '', 0);
token.content = result[1]; token.content = result[1];
buffer.push(token);
state.push('span_close', 'span', -1); buffer.push(new state.Token('span_close', 'span', -1));
state.push('link_close', 'a', -1); buffer.push(new state.Token('link_close', 'a', -1));
} else { } else {
token = state.push('span_open', 'span', 1); token = new state.Token('span_open', 'span', 1);
token.attrs = [['class', 'hashtag']]; token.attrs = [['class', 'hashtag']];
buffer.push(token);
token = state.push('text', '', 0); token = new state.Token('text', '', 0);
token.content = hashtag; token.content = hashtag;
buffer.push(token);
token = state.push('span_close', 'span', -1); token = new state.Token('span_close', 'span', -1);
buffer.push(token);
}
} }
// Matches "#" followed by 1-101 slug characters (word chars, "-", or ":",
// the ":" allowing parent:child category slugs). Module-level /g regex:
// safe across calls because the exec loop below always runs to exhaustion,
// which resets lastIndex to 0.
const REGEX = /#([\w-:]{1,101})/gi;

/**
 * Returns true when the character at `index` may border a hashtag,
 * i.e. it is whitespace or punctuation per markdown-it's utils.
 *
 * @param {string} content - text being scanned
 * @param {number} index - position of the boundary character
 * @param {Object} utils - markdown-it utils (isWhiteSpace, isPunctChar)
 * @returns {boolean}
 */
function allowedBoundary(content, index, utils) {
  const code = content.charCodeAt(index);
  return utils.isWhiteSpace(code) || utils.isPunctChar(String.fromCharCode(code));
}

/**
 * Scans a text token's content for category hashtags and, when at least one
 * valid hashtag is found, returns a replacement token buffer: plain text
 * tokens for the surrounding text plus the tokens emitted by `addHashtag`
 * for each match. Returns null when nothing needs replacing, so the caller
 * (textReplace) can keep the original token untouched.
 *
 * @param {string} content - raw text of the token being scanned
 * @param {Object} state - markdown-it core state (Token ctor, md.utils)
 * @returns {?Array} replacement token list, or null if no hashtag matched
 */
function applyHashtag(content, state) {
  let result = null;
  let match;
  let pos = 0;

  while ((match = REGEX.exec(content)) !== null) {
    // reject hashtags glued to a preceding word character (e.g. "abc#tag")
    if (match.index > 0) {
      if (!allowedBoundary(content, match.index - 1, state.md.utils)) {
        continue;
      }
    }

    // likewise reject a word character immediately after the match
    if (match.index + match[0].length < content.length) {
      if (!allowedBoundary(content, match.index + match[0].length, state.md.utils)) {
        continue;
      }
    }

    // flush the plain text between the previous match and this one
    if (match.index > pos) {
      result = result || [];
      const token = new state.Token('text', '', 0);
      token.content = content.slice(pos, match.index);
      result.push(token);
    }

    result = result || [];
    addHashtag(result, match, state);

    pos = match.index + match[0].length;
  }

  // trailing text after the last accepted match
  if (result && pos < content.length) {
    const token = new state.Token('text', '', 0);
    token.content = content.slice(pos);
    result.push(token);
  }

  return result;
}
export function setup(helper) { export function setup(helper) {
@ -45,14 +100,8 @@ export function setup(helper) {
helper.registerPlugin(md=>{ helper.registerPlugin(md=>{
const rule = inlineRegexRule(md, { md.core.ruler.push('category-hashtag', state => textReplace(
start: '#', state, applyHashtag, true /* skip all links */
matcher: /^#([\w-:]{1,101})/i, ));
skipInLink: true,
maxLength: 102,
emitter: emitter
});
md.inline.ruler.push('category-hashtag', rule);
}); });
} }

View File

@ -12,6 +12,7 @@ export default null;
// matcher: /^#([\w-:]{1,101})/i, // matcher: /^#([\w-:]{1,101})/i,
// emitter: emitter // emitter: emitter
// }); // });
export function inlineRegexRule(md, options) { export function inlineRegexRule(md, options) {
const start = options.start.charCodeAt(0); const start = options.start.charCodeAt(0);
@ -27,7 +28,7 @@ export function inlineRegexRule(md, options) {
// test prev // test prev
if (pos > 0) { if (pos > 0) {
let prev = state.src.charCodeAt(pos-1); let prev = state.src.charCodeAt(pos-1);
if (!md.utils.isSpace(prev) && !md.utils.isPunctChar(String.fromCharCode(prev))) { if (!md.utils.isWhiteSpace(prev) && !md.utils.isPunctChar(String.fromCharCode(prev))) {
return false; return false;
} }
} }
@ -38,10 +39,10 @@ export function inlineRegexRule(md, options) {
for(i=state.tokens.length-1;i>=0;i--) { for(i=state.tokens.length-1;i>=0;i--) {
let token = state.tokens[i]; let token = state.tokens[i];
let type = token.type; let type = token.type;
if (type === 'link_open' || (type === 'html_inline' && token.content.substr(0,2) === "<a")) { if (type === 'link_open' || (type === 'html_inline' && token.content.substr(0,2).toLowerCase() === "<a")) {
return false; return false;
} }
if (type.block || type === 'link_close' || (type === 'html_inline' && token.content.substr(0,3).toLowerCase() === "</a>")) { if (type.block || type === 'link_close' || (type === 'html_inline' && token.content.substr(0,4).toLowerCase() === "</a>")) {
break; break;
} }
} }
@ -75,10 +76,10 @@ export function inlineRegexRule(md, options) {
// based off https://github.com/markdown-it/markdown-it-emoji/blob/master/dist/markdown-it-emoji.js // based off https://github.com/markdown-it/markdown-it-emoji/blob/master/dist/markdown-it-emoji.js
// //
export function textReplace(state, callback) { export function textReplace(state, callback, skipAllLinks) {
var i, j, l, tokens, token, var i, j, l, tokens, token,
blockTokens = state.tokens, blockTokens = state.tokens,
autolinkLevel = 0; linkLevel = 0;
for (j = 0, l = blockTokens.length; j < l; j++) { for (j = 0, l = blockTokens.length; j < l; j++) {
if (blockTokens[j].type !== 'inline') { continue; } if (blockTokens[j].type !== 'inline') { continue; }
@ -89,11 +90,23 @@ export function textReplace(state, callback) {
for (i = tokens.length - 1; i >= 0; i--) { for (i = tokens.length - 1; i >= 0; i--) {
token = tokens[i]; token = tokens[i];
if (skipAllLinks) {
if (token.type === 'link_open' || token.type === 'link_close') { if (token.type === 'link_open' || token.type === 'link_close') {
if (token.info === 'auto') { autolinkLevel -= token.nesting; } linkLevel -= token.nesting;
} else if (token.type === 'html_inline') {
if (token.content.substr(0,2).toLowerCase() === "<a") {
linkLevel++;
} else if (token.content.substr(0,4).toLowerCase() === "</a>") {
linkLevel--;
}
}
} else {
if (token.type === 'link_open' || token.type === 'link_close') {
if (token.info === 'auto') { linkLevel -= token.nesting; }
}
} }
if (token.type === 'text' && autolinkLevel === 0) { if (token.type === 'text' && linkLevel === 0) {
let split; let split;
if(split = callback(token.content, state)) { if(split = callback(token.content, state)) {
// replace current node // replace current node

View File

@ -561,6 +561,12 @@ HTML
HTML HTML
expect(cooked).to eq(html.strip) expect(cooked).to eq(html.strip)
# ensure it does not fight with the autolinker
expect(PrettyText.cook(' http://somewhere.com/#known')).not_to include('hashtag')
expect(PrettyText.cook(' http://somewhere.com/?#known')).not_to include('hashtag')
expect(PrettyText.cook(' http://somewhere.com/?abc#known')).not_to include('hashtag')
end end
it "can handle mixed lists" do it "can handle mixed lists" do