Ran Luo 2019-04-04 23:22:04 +08:00 committed by GitHub
parent c590977c09
commit a2786b13d2
6 changed files with 21 additions and 28 deletions

View File

@@ -45,7 +45,7 @@ div.ag-show-quick-insert-hint p.ag-paragraph.ag-active > span.ag-line:first-of-t
.ag-reference-marker {
font-size: .9em;
color: var(--editorColor10);
color: var(--editorColor50);
}
.ag-reference-title {

View File

@@ -93,7 +93,7 @@ const validateEmphasize = (src, offset, marker, pending) => {
return lowerPriority(src, offset)
}
const tokenizerFac = (src, beginRules, inlineRules, pos = 0, top) => {
const tokenizerFac = (src, beginRules, inlineRules, pos = 0, top, labels) => {
const tokens = []
let pending = ''
let pendingStartPos = pos
@@ -211,7 +211,7 @@ const tokenizerFac = (src, beginRules, inlineRules, pos = 0, top) => {
range,
marker,
parent: tokens,
children: tokenizerFac(to[2], undefined, inlineRules, pos + to[1].length, false),
children: tokenizerFac(to[2], undefined, inlineRules, pos + to[1].length, false, labels),
backlash: to[3]
})
src = src.substring(to[0].length)
@@ -252,7 +252,7 @@ const tokenizerFac = (src, beginRules, inlineRules, pos = 0, top) => {
range,
marker,
parent: tokens,
children: tokenizerFac(to[2], undefined, inlineRules, pos + to[1].length, false),
children: tokenizerFac(to[2], undefined, inlineRules, pos + to[1].length, false, labels),
backlash: to[3]
})
}
@@ -301,7 +301,7 @@ const tokenizerFac = (src, beginRules, inlineRules, pos = 0, top) => {
start: pos,
end: pos + linkTo[0].length
},
children: tokenizerFac(linkTo[2], undefined, inlineRules, pos + linkTo[1].length, false),
children: tokenizerFac(linkTo[2], undefined, inlineRules, pos + linkTo[1].length, false, labels),
backlash: {
first: linkTo[3],
second: linkTo[5]
@@ -314,9 +314,8 @@ const tokenizerFac = (src, beginRules, inlineRules, pos = 0, top) => {
}
const rLinkTo = inlineRules['reference_link'].exec(src)
if (rLinkTo && isLengthEven(rLinkTo[2]) && isLengthEven(rLinkTo[4])) {
if (rLinkTo && labels.has(rLinkTo[3] || rLinkTo[1]) && isLengthEven(rLinkTo[2]) && isLengthEven(rLinkTo[4])) {
pushPending()
tokens.push({
type: 'reference_link',
raw: rLinkTo[0],
@@ -332,7 +331,7 @@ const tokenizerFac = (src, beginRules, inlineRules, pos = 0, top) => {
start: pos,
end: pos + rLinkTo[0].length
},
children: tokenizerFac(rLinkTo[1], undefined, inlineRules, pos + 1, false)
children: tokenizerFac(rLinkTo[1], undefined, inlineRules, pos + 1, false, labels)
})
src = src.substring(rLinkTo[0].length)
@@ -424,7 +423,7 @@ const tokenizerFac = (src, beginRules, inlineRules, pos = 0, top) => {
parent: tokens,
attrs,
content: htmlTo[4],
children: htmlTo[4] ? tokenizerFac(htmlTo[4], undefined, inlineRules, pos + htmlTo[2].length, false) : '',
children: htmlTo[4] ? tokenizerFac(htmlTo[4], undefined, inlineRules, pos + htmlTo[2].length, false, labels) : '',
range: {
start: pos,
end: pos + len
@@ -502,8 +501,8 @@ const tokenizerFac = (src, beginRules, inlineRules, pos = 0, top) => {
return tokens
}
export const tokenizer = (src, highlights = [], hasBeginRules = true) => {
const tokens = tokenizerFac(src, hasBeginRules ? beginRules : null, inlineRules, 0, true)
export const tokenizer = (src, highlights = [], hasBeginRules = true, labels = new Map()) => {
const tokens = tokenizerFac(src, hasBeginRules ? beginRules : null, inlineRules, 0, true, labels)
const postTokenizer = tokens => {
for (const token of tokens) {
for (const light of highlights) {
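
The new labels parameter is threaded through every recursive tokenizerFac call so that [text][label] is only emitted as a reference_link token when the label has a matching definition. A minimal usage sketch of the updated export, assuming labels maps each definition label to { href, title } (the shape the renderer destructures below); the sample text and import path are illustrative:

// Hypothetical usage sketch, not part of this commit.
import { tokenizer } from './tokenizer'

const labels = new Map([
  // definition label -> { href, title }
  ['defined', { href: 'https://example.com', title: '' }]
])

const tokens = tokenizer('[a][defined] and [b][missing]', [], true, labels)
// '[a][defined]' produces a reference_link token; '[b][missing]' stays
// plain text because labels.has('missing') is false.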

View File

@@ -186,6 +186,7 @@ class StateRender {
patch(oldCursorVnode, newCursorVnode)
}
}
this.renderMermaid()
this.renderDiagram()
}

View File

@@ -79,7 +79,7 @@ export default function renderLeafBlock (block, cursor, activeBlocks, matches, u
functionType !== 'languageInput'
) {
const hasBeginRules = /^(h\d|span|hr)/.test(type)
tokens = tokenizer(text, highlights, hasBeginRules)
tokens = tokenizer(text, highlights, hasBeginRules, this.labels)
if (highlights.length === 0 && useCache && DEVICE_MEMORY >= 4) {
this.tokenCache.set(text, tokens)
}
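
The tokenizer call above now forwards this.labels, and its result is only cached when there are no search highlights, caching is requested, and DEVICE_MEMORY is at least 4. A rough sketch of the lookup side such a text-keyed cache implies, assuming a plain Map and the tokenizer export from the previous file; getTokens is an illustrative helper, not Muya's actual code:

// Illustrative only: reuse cached tokens for unchanged text, otherwise
// tokenize again with the reference-definition labels.
const getTokens = (text, highlights, hasBeginRules, labels, tokenCache) => {
  if (highlights.length === 0 && tokenCache.has(text)) {
    return tokenCache.get(text)
  }
  const tokens = tokenizer(text, highlights, hasBeginRules, labels)
  if (highlights.length === 0) {
    tokenCache.set(text, tokens)
  }
  return tokens
}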

View File

@@ -15,23 +15,8 @@ export default function referenceLink (h, cursor, block, token, outerClass) {
label
} = token
const MARKER = '['
let href = ''
let title = ''
const key = (label + backlash.second).toLowerCase()
if (this.labels.has(key)) {
({ href, title } = this.labels.get(key))
} else {
// if the link's reference definition is not defined, just show as normal text.#432
return this.highlight(h, block, start, end, token)
}
const backlashStart = start + MARKER.length + anchor.length
const startMarker = this.highlight(
h,
block,
start,
start + MARKER.length,
token
)
const content = [
...children.reduce((acc, to) => {
const chunk = this[snakeToCamel(to.type)](h, cursor, block, to, className)
@@ -39,6 +24,14 @@ export default function referenceLink (h, cursor, block, token, outerClass) {
}, []),
...this.backlashInToken(h, backlash.first, className, backlashStart, token)
]
const { href, title } = this.labels.get(key)
const startMarker = this.highlight(
h,
block,
start,
start + MARKER.length,
token
)
const endMarker = this.highlight(
h,
block,
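
With the undefined-label fallback removed, referenceLink relies on the tokenizer having already skipped labels that lack a definition, so this.labels.get(key) can be destructured directly. A rough sketch of how such a labels map could be built from link reference definitions; the regex and collectLabels helper are illustrative, not Muya's actual collection code:

// Hypothetical: gather definitions such as `[mark]: https://example.com "Title"`
// into the Map read by tokenizer() and this.labels.
const collectLabels = lines => {
  const labels = new Map()
  const DEFINITION_REG = /^\[([^\]]+)\]:\s+(\S+)(?:\s+"([^"]*)")?\s*$/
  for (const line of lines) {
    const to = DEFINITION_REG.exec(line)
    if (to) {
      labels.set(to[1].toLowerCase(), { href: to[2], title: to[3] || '' })
    }
  }
  return labels
}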

View File

@@ -118,7 +118,7 @@ kbd {
border: 1px solid var(--floatBorderColor);
border-radius: 4px;
display: inline-block;
transform: scale(.8);
font-size: .8em;
padding: 0px 5px;
box-shadow: inset 0 -1px 0 var(--floatBorderColor);
}