FIX: don't create tags autolinks inside links (new engine)

commit 6f09df0deb
parent c47658c7c5

@@ -1,42 +1,97 @@
-import { inlineRegexRule } from 'pretty-text/engines/markdown-it/helpers';
+import { textReplace } from 'pretty-text/engines/markdown-it/helpers';

-function emitter(matches, state) {
+function addHashtag(buffer, matches, state) {
   const options = state.md.options.discourse;
   const [hashtag, slug] = matches;
   const categoryHashtagLookup = options.categoryHashtagLookup;
   const result = categoryHashtagLookup && categoryHashtagLookup(slug);

   let token;

   if (result) {
-    token = state.push('link_open', 'a', 1);
+    token = new state.Token('link_open', 'a', 1);
     token.attrs = [['class', 'hashtag'], ['href', result[0]]];
     token.block = false;
+    buffer.push(token);

-    token = state.push('text', '', 0);
+    token = new state.Token('text', '', 0);
     token.content = '#';
+    buffer.push(token);

-    token = state.push('span_open', 'span', 1);
+    token = new state.Token('span_open', 'span', 1);
     token.block = false;
+    buffer.push(token);

-    token = state.push('text', '', 0);
+    token = new state.Token('text', '', 0);
     token.content = result[1];
+    buffer.push(token);

-    state.push('span_close', 'span', -1);
+    buffer.push(new state.Token('span_close', 'span', -1));

-    state.push('link_close', 'a', -1);
+    buffer.push(new state.Token('link_close', 'a', -1));
   } else {
-    token = state.push('span_open', 'span', 1);
+    token = new state.Token('span_open', 'span', 1);
     token.attrs = [['class', 'hashtag']];
+    buffer.push(token);

-    token = state.push('text', '', 0);
+    token = new state.Token('text', '', 0);
     token.content = hashtag;
+    buffer.push(token);

-    token = state.push('span_close', 'span', -1);
+    token = new state.Token('span_close', 'span', -1);
+    buffer.push(token);
   }
 }

+const REGEX = /#([\w-:]{1,101})/gi;
+
+function allowedBoundary(content, index, utils) {
+  let code = content.charCodeAt(index);
+  return (utils.isWhiteSpace(code) || utils.isPunctChar(String.fromCharCode(code)));
+}
+
+function applyHashtag(content, state) {
+  let result = null,
+      match,
+      pos = 0;
+
+  while (match = REGEX.exec(content)) {
+    // check boundary
+    if (match.index > 0) {
+      if (!allowedBoundary(content, match.index-1, state.md.utils)) {
+        continue;
+      }
+    }
+
+    // check forward boundary as well
+    if (match.index + match[0].length < content.length) {
+      if (!allowedBoundary(content, match.index + match[0].length, state.md.utils)) {
+        continue;
+      }
+    }
+
+    if (match.index > pos) {
+      result = result || [];
+      let token = new state.Token('text', '', 0);
+      token.content = content.slice(pos, match.index);
+      result.push(token);
+    }
+
+    result = result || [];
+    addHashtag(result, match, state);
+
+    pos = match.index + match[0].length;
+  }
+
+  if (result && pos < content.length) {
+    let token = new state.Token('text', '', 0);
+    token.content = content.slice(pos);
+    result.push(token);
+  }
+
+  return result;
+}

 export function setup(helper) {
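Review note: the key mechanical change above is that token creation moved from `state.push(...)`, which appends to the live inline stream, to detached `new state.Token(...)` instances collected in a buffer, so the caller decides where (and whether) they get spliced in. A minimal sketch of that splice pattern with stock markdown-it, independent of this commit — the rule name `demo-replace` and the `#demo` marker are invented for illustration, and only the first occurrence is handled to keep it short:

    // sketch: replace one 'text' token with a buffer of detached Tokens
    const md = require('markdown-it')();

    md.core.ruler.push('demo-replace', state => {
      for (const blockToken of state.tokens) {
        if (blockToken.type !== 'inline') { continue; }

        const children = [];
        for (const token of blockToken.children) {
          if (token.type !== 'text' || !token.content.includes('#demo')) {
            children.push(token);
            continue;
          }

          // build replacements in a detached buffer, as addHashtag does
          const [before, after] = token.content.split('#demo');
          let t = new state.Token('text', '', 0);
          t.content = before;
          children.push(t);

          children.push(new state.Token('strong_open', 'strong', 1));
          t = new state.Token('text', '', 0);
          t.content = '#demo';
          children.push(t);
          children.push(new state.Token('strong_close', 'strong', -1));

          t = new state.Token('text', '', 0);
          t.content = after;
          children.push(t);
        }
        blockToken.children = children;
      }
    });

    console.log(md.render('see #demo here'));
    // => <p>see <strong>#demo</strong> here</p>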

@@ -45,14 +100,8 @@ export function setup(helper) {

   helper.registerPlugin(md=>{

-    const rule = inlineRegexRule(md, {
-      start: '#',
-      matcher: /^#([\w-:]{1,101})/i,
-      skipInLink: true,
-      maxLength: 102,
-      emitter: emitter
-    });
-
-    md.inline.ruler.push('category-hashtag', rule);
+    md.core.ruler.push('category-hashtag', state => textReplace(
+      state, applyHashtag, true /* skip all links */
+    ));
   });
 }
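This registration change is the fix itself: the hashtag matcher is no longer an inline rule that fires while links are still being assembled, but a core rule that runs after inline parsing and after the autolinker, when every link already exists as `link_open`/`link_close` tokens, so `textReplace` can simply refuse to touch text inside them. A sketch of the same wiring with a stand-in lookup — it assumes `textReplace` and `applyHashtag` from the files above are in scope, and `categoryHashtagLookup` here is a stub, not Discourse's real resolver:

    const md = require('markdown-it')({ linkify: true });

    md.options.discourse = {
      // stub: resolves a slug to [url, title], as the real lookup does
      categoryHashtagLookup(slug) {
        return slug === 'known' ? ['/c/known', 'Known Category'] : null;
      }
    };

    // linkify has already produced link_open/link_close pairs by the
    // time core rules run, so skipping link interiors is reliable here
    md.core.ruler.push('category-hashtag', state => textReplace(
      state, applyHashtag, true /* skip all links */
    ));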

@@ -12,6 +12,7 @@ export default null;

 //    matcher: /^#([\w-:]{1,101})/i,
 //    emitter: emitter
 // });

 export function inlineRegexRule(md, options) {

   const start = options.start.charCodeAt(0);

@@ -27,7 +28,7 @@ export function inlineRegexRule(md, options) {

   // test prev
   if (pos > 0) {
     let prev = state.src.charCodeAt(pos-1);
-    if (!md.utils.isSpace(prev) && !md.utils.isPunctChar(String.fromCharCode(prev))) {
+    if (!md.utils.isWhiteSpace(prev) && !md.utils.isPunctChar(String.fromCharCode(prev))) {
       return false;
     }
   }
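Worth spelling out: `md.utils.isSpace` accepts only 0x20 (space) and 0x09 (tab), so a hashtag preceded by a newline or a non-breaking space was wrongly rejected; `md.utils.isWhiteSpace` covers the full Unicode whitespace set. A quick illustration against markdown-it's utils:

    const { isSpace, isWhiteSpace } = require('markdown-it')().utils;

    isSpace('\n'.charCodeAt(0));          // false -- newline is not "space"
    isWhiteSpace('\n'.charCodeAt(0));     // true
    isWhiteSpace('\u00a0'.charCodeAt(0)); // true -- NBSP counts too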

@@ -38,10 +39,10 @@ export function inlineRegexRule(md, options) {

   for(i=state.tokens.length-1;i>=0;i--) {
     let token = state.tokens[i];
     let type = token.type;
-    if (type === 'link_open' || (type === 'html_inline' && token.content.substr(0,2) === "<a")) {
+    if (type === 'link_open' || (type === 'html_inline' && token.content.substr(0,2).toLowerCase() === "<a")) {
       return false;
     }
-    if (type.block || type === 'link_close' || (type === 'html_inline' && token.content.substr(0,3).toLowerCase() === "</a>")) {
+    if (type.block || type === 'link_close' || (type === 'html_inline' && token.content.substr(0,4).toLowerCase() === "</a>")) {
       break;
     }
   }
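The second change in this hunk fixes an off-by-one in the closing-anchor test: `substr(0,3)` yields the three-character string `"</a"`, which can never equal the four-character `"</a>"`, so the backwards scan never recognized a closing tag it had walked past:

    '</a> done'.substr(0, 3) === '</a>'; // always false: "</a" !== "</a>"
    '</a> done'.substr(0, 4) === '</a>'; // true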

@@ -75,10 +76,10 @@ export function inlineRegexRule(md, options) {

 // based off https://github.com/markdown-it/markdown-it-emoji/blob/master/dist/markdown-it-emoji.js
 //
-export function textReplace(state, callback) {
+export function textReplace(state, callback, skipAllLinks) {
   var i, j, l, tokens, token,
       blockTokens = state.tokens,
-      autolinkLevel = 0;
+      linkLevel = 0;

   for (j = 0, l = blockTokens.length; j < l; j++) {
     if (blockTokens[j].type !== 'inline') { continue; }

@@ -89,11 +90,23 @@ export function textReplace(state, callback) {

     for (i = tokens.length - 1; i >= 0; i--) {
       token = tokens[i];

-      if (token.type === 'link_open' || token.type === 'link_close') {
-        if (token.info === 'auto') { autolinkLevel -= token.nesting; }
-      }
+      if (skipAllLinks) {
+        if (token.type === 'link_open' || token.type === 'link_close') {
+          linkLevel -= token.nesting;
+        } else if (token.type === 'html_inline') {
+          if (token.content.substr(0,2).toLowerCase() === "<a") {
+            linkLevel++;
+          } else if (token.content.substr(0,4).toLowerCase() === "</a>") {
+            linkLevel--;
+          }
+        }
+      } else {
+        if (token.type === 'link_open' || token.type === 'link_close') {
+          if (token.info === 'auto') { linkLevel -= token.nesting; }
+        }
+      }

-      if (token.type === 'text' && autolinkLevel === 0) {
+      if (token.type === 'text' && linkLevel === 0) {
         let split;
         if(split = callback(token.content, state)) {
           // replace current node
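The `skipAllLinks` branch keeps one `linkLevel` counter for both kinds of links. Because the children are scanned in reverse, a markdown link leaves the counter at +1 between its `link_close` and `link_open`, and a raw `<a>…</a>` pair leaves it at -1; either way the level is non-zero, so text inside a link is never offered to the callback. A hand-built walkthrough with toy tokens (stand-ins, not real parser output):

    const children = [
      { type: 'text', content: 'see ' },
      { type: 'link_open', nesting: 1, info: 'auto' },
      { type: 'text', content: 'http://somewhere.com/#known' },
      { type: 'link_close', nesting: -1, info: 'auto' },
    ];

    let linkLevel = 0;
    for (let i = children.length - 1; i >= 0; i--) {
      const token = children[i];
      if (token.type === 'link_open' || token.type === 'link_close') {
        linkLevel -= token.nesting;
      }
      if (token.type === 'text' && linkLevel === 0) {
        console.log('would replace:', JSON.stringify(token.content));
      }
    }
    // only "see " is offered for replacement; the URL text sits at
    // linkLevel 1 and is skipped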

@@ -561,6 +561,12 @@ HTML
 HTML

     expect(cooked).to eq(html.strip)

+    # ensure it does not fight with the autolinker
+    expect(PrettyText.cook(' http://somewhere.com/#known')).not_to include('hashtag')
+    expect(PrettyText.cook(' http://somewhere.com/?#known')).not_to include('hashtag')
+    expect(PrettyText.cook(' http://somewhere.com/?abc#known')).not_to include('hashtag')
+
   end

   it "can handle mixed lists" do
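Context for the new specs: the autolinker claims the entire URL, fragment included, so `#known` ends up as plain text inside an autolink token pair — exactly where the old inline rule could still fire. A quick check with stock markdown-it (not Discourse's full engine) showing where the fragment lands:

    const md = require('markdown-it')({ linkify: true });
    const tokens = md.parse(' http://somewhere.com/#known', {});
    const inline = tokens.find(t => t.type === 'inline');
    for (const t of inline.children) {
      console.log(t.type, t.info || '', JSON.stringify(t.content));
    }
    // link_open/link_close from linkify carry info === 'auto', and the
    // '#known' fragment lives in the text token between them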