diff --git a/kernel/model/file.go b/kernel/model/file.go
index 5f6d1d718..ac116f995 100644
--- a/kernel/model/file.go
+++ b/kernel/model/file.go
@@ -676,7 +676,7 @@ func GetDoc(startID, endID, id string, index int, keyword string, mode int, size
// Support locating search matches in code blocks https://github.com/siyuan-note/siyuan/issues/5520
if ast.NodeCodeBlockCode == n.Type && 0 < len(keywords) && !treenode.IsChartCodeBlockCode(n) {
text := string(n.Tokens)
- text = search.EncloseHighlighting(text, keywords, search.SearchMarkLeft, search.SearchMarkRight, Conf.Search.CaseSensitive)
+ text = search.EncloseHighlighting(text, keywords, search.SearchMarkLeft, search.SearchMarkRight, Conf.Search.CaseSensitive, false)
n.Tokens = gulu.Str.ToBytes(text)
}
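
Every call site of EncloseHighlighting gains the new trailing splitWords argument; this code-block path in GetDoc passes false, so in-code search highlighting keeps plain substring matching. A minimal sketch of calling the updated signature from inside the kernel module (the sample text and keyword are illustrative only, not taken from the repository):

package main

import (
	"fmt"

	"github.com/siyuan-note/siyuan/kernel/search"
)

func main() {
	// splitWords=false keeps substring highlighting, matching what GetDoc
	// relies on for locating search hits inside code blocks.
	marked := search.EncloseHighlighting("fmt.println(x)", []string{"print"},
		search.SearchMarkLeft, search.SearchMarkRight, false, false)
	fmt.Println(marked)
}
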
diff --git a/kernel/model/search.go b/kernel/model/search.go
index 827813dc2..f6a94beb9 100644
--- a/kernel/model/search.go
+++ b/kernel/model/search.go
@@ -1001,7 +1001,7 @@ func stringQuery(query string) string {
func markReplaceSpan(n *ast.Node, unlinks *[]*ast.Node, keywords []string, markSpanDataType string, luteEngine *lute.Lute) bool {
text := n.Content()
if ast.NodeText == n.Type {
- text = search.EncloseHighlighting(text, keywords, search.GetMarkSpanStart(markSpanDataType), search.GetMarkSpanEnd(), Conf.Search.CaseSensitive)
+ text = search.EncloseHighlighting(text, keywords, search.GetMarkSpanStart(markSpanDataType), search.GetMarkSpanEnd(), Conf.Search.CaseSensitive, false)
n.Tokens = gulu.Str.ToBytes(text)
if bytes.Contains(n.Tokens, []byte(search.MarkDataType)) {
linkTree := parse.Inline("", n.Tokens, luteEngine.ParseOptions)
@@ -1022,7 +1022,7 @@ func markReplaceSpan(n *ast.Node, unlinks *[]*ast.Node, keywords []string, markS
}
startTag := search.GetMarkSpanStart(markSpanDataType)
- text = search.EncloseHighlighting(text, keywords, startTag, search.GetMarkSpanEnd(), Conf.Search.CaseSensitive)
+ text = search.EncloseHighlighting(text, keywords, startTag, search.GetMarkSpanEnd(), Conf.Search.CaseSensitive, false)
if strings.Contains(text, search.MarkDataType) {
dataType := search.GetMarkSpanStart(n.TextMarkType + " " + search.MarkDataType)
text = strings.ReplaceAll(text, startTag, dataType)
@@ -1075,7 +1075,7 @@ func markReplaceSpanWithSplit(text string, keywords []string, replacementStart,
// Virtual reference and backlink mention keywords use longest-match-first https://github.com/siyuan-note/siyuan/issues/7465
sort.Slice(keywords, func(i, j int) bool { return len(keywords[i]) > len(keywords[j]) })
- tmp := search.EncloseHighlighting(text, keywords, replacementStart, replacementEnd, Conf.Search.CaseSensitive)
+ tmp := search.EncloseHighlighting(text, keywords, replacementStart, replacementEnd, Conf.Search.CaseSensitive, true)
parts := strings.Split(tmp, replacementEnd)
buf := bytes.Buffer{}
for i := 0; i < len(parts); i++ {
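
Only markReplaceSpanWithSplit opts into splitWords=true: virtual references and backlink mentions match ASCII keywords as whole words, and the keywords are first sorted longest-first so overlapping candidates resolve to the longest match (issue #7465, the sort kept above). A small standalone illustration of that ordering, with made-up keyword values:

package main

import (
	"fmt"
	"sort"
)

func main() {
	// Longest keyword first, mirroring the sort in markReplaceSpanWithSplit,
	// so "note taking" is tried before "note" when both are candidates.
	keywords := []string{"note", "note taking", "doc"}
	sort.Slice(keywords, func(i, j int) bool { return len(keywords[i]) > len(keywords[j]) })
	fmt.Println(keywords) // [note taking note doc]
}
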
diff --git a/kernel/search/mark.go b/kernel/search/mark.go
index 6296439ee..5e0498564 100644
--- a/kernel/search/mark.go
+++ b/kernel/search/mark.go
@@ -33,7 +33,7 @@ func MarkText(text string, keyword string, beforeLen int, caseSensitive bool) (p
}
text = util.EscapeHTML(text)
keywords := SplitKeyword(keyword)
- marked = EncloseHighlighting(text, keywords, "<mark>", "</mark>", caseSensitive)
+ marked = EncloseHighlighting(text, keywords, "<mark>", "</mark>", caseSensitive, false)
pos = strings.Index(marked, "<mark>")
if 0 > pos {
@@ -81,14 +81,17 @@ func SplitKeyword(keyword string) (keywords []string) {
return
}
-func EncloseHighlighting(text string, keywords []string, openMark, closeMark string, caseSensitive bool) (ret string) {
+func EncloseHighlighting(text string, keywords []string, openMark, closeMark string, caseSensitive, splitWords bool) (ret string) {
ic := "(?i)"
if caseSensitive {
ic = "(?)"
}
re := ic + "("
for i, k := range keywords {
- wordBoundary := lex.IsASCIILetterNums(gulu.Str.ToBytes(k)) // Improve virtual reference split words https://github.com/siyuan-note/siyuan/issues/7833
+ wordBoundary := false
+ if splitWords {
+ wordBoundary = lex.IsASCIILetterNums(gulu.Str.ToBytes(k)) // Improve virtual reference split words https://github.com/siyuan-note/siyuan/issues/7833
+ }
k = regexp.QuoteMeta(k)
re += "("
if wordBoundary {
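
The splitWords flag only controls whether an ASCII-letters-and-digits keyword takes the wordBoundary branch above, presumably anchoring it as a whole word before it is compiled into the highlighting regexp. The sketch below shows the observable difference using a simplified, hypothetical enclose helper; it is not the real EncloseHighlighting body:

package main

import (
	"fmt"
	"regexp"
)

// enclose is a hypothetical stand-in for search.EncloseHighlighting: it wraps
// every keyword occurrence in openMark/closeMark, and splitWords additionally
// anchors the keyword at \b word boundaries.
func enclose(text, keyword, openMark, closeMark string, splitWords bool) string {
	pattern := regexp.QuoteMeta(keyword)
	if splitWords {
		pattern = `\b` + pattern + `\b` // whole-word match only
	}
	re := regexp.MustCompile("(?i)" + pattern)
	return re.ReplaceAllString(text, openMark+"$0"+closeMark)
}

func main() {
	text := "foobar foo"
	// splitWords=false (regular search highlighting): both "foo" substrings match.
	fmt.Println(enclose(text, "foo", "<mark>", "</mark>", false))
	// splitWords=true (virtual refs / backlink mentions): only the whole word matches.
	fmt.Println(enclose(text, "foo", "<mark>", "</mark>", true))
}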