Mirror of https://github.com/squidfunk/mkdocs-material.git, synced 2024-06-14 11:52:32 +03:00

Merged features tied to Carolina Reaper funding goal

This commit is contained in:
parent e0dce6cc1d
commit b550b1a532
@@ -103,6 +103,7 @@
"dppx",
"deg",
"em",
"fr",
"mm",
"ms",
"px",
@@ -834,20 +834,6 @@
}
}
]
},
"include_search_page": {
"title": "Only necessary when installing from git",
"markdownDescription": "Must be set to `false`",
"enum": [
false
]
},
"search_index_only": {
"title": "Only necessary when installing from git",
"markdownDescription": "Must be set to `false`",
"enum": [
true
]
}
},
"additionalProperties": false,
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -16,5 +16,5 @@
{% endblock %}
{% block scripts %}
{{ super() }}
<script src="{{ 'assets/javascripts/custom.83b17dfb.min.js' | url }}"></script>
<script src="{{ 'assets/javascripts/custom.147554b9.min.js' | url }}"></script>
{% endblock %}
29 material/assets/javascripts/bundle.43982a0d.min.js vendored Normal file
File diff suppressed because one or more lines are too long
8 material/assets/javascripts/bundle.43982a0d.min.js.map Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
1 material/assets/stylesheets/extra.52c02453.min.css vendored Normal file
File diff suppressed because one or more lines are too long
1 material/assets/stylesheets/extra.52c02453.min.css.map Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
1 material/assets/stylesheets/main.91872f81.min.css vendored Normal file
File diff suppressed because one or more lines are too long
1 material/assets/stylesheets/main.91872f81.min.css.map Normal file
File diff suppressed because one or more lines are too long
@@ -34,7 +34,7 @@
{% endif %}
{% endblock %}
{% block styles %}
<link rel="stylesheet" href="{{ 'assets/stylesheets/main.47fa6176.min.css' | url }}">
<link rel="stylesheet" href="{{ 'assets/stylesheets/main.91872f81.min.css' | url }}">
{% if config.theme.palette %}
{% set palette = config.theme.palette %}
<link rel="stylesheet" href="{{ 'assets/stylesheets/palette.2505c338.min.css' | url }}">
@@ -211,7 +211,7 @@
"base": base_url,
"features": features,
"translations": {},
"search": "assets/javascripts/workers/search.16e2a7d4.min.js" | url
"search": "assets/javascripts/workers/search.cd82efe4.min.js" | url
} -%}
{%- if config.extra.version -%}
{%- set _ = app.update({ "version": config.extra.version }) -%}
@@ -223,10 +223,6 @@
{%- for key in [
"clipboard.copy",
"clipboard.copied",
"search.config.lang",
"search.config.pipeline",
"search.config.separator",
"search.placeholder",
"search.result.placeholder",
"search.result.none",
"search.result.one",
@@ -243,13 +239,13 @@
</script>
{% endblock %}
{% block scripts %}
<script src="{{ 'assets/javascripts/bundle.cef3dc0e.min.js' | url }}"></script>
<script src="{{ 'assets/javascripts/bundle.43982a0d.min.js' | url }}"></script>
{% for path in config.extra_javascript %}
<script src="{{ path | url }}"></script>
{% endfor %}
{% endblock %}
{% if page.meta and page.meta.ᴴₒᴴₒᴴₒ %}
<link rel="stylesheet" href="{{ 'assets/stylesheets/extra.c2715e54.min.css' | url }}">
<link rel="stylesheet" href="{{ 'assets/stylesheets/extra.52c02453.min.css' | url }}">
<script src="{{ 'assets/javascripts/extra/bundle.f719a234.min.js' | url }}" defer></script>
{% endif %}
</body>
@@ -14,7 +14,6 @@
"meta.source": "Πηγή",
"nav": "Πλοήγηση",
"search": "Αναζήτηση",
"search.config.pipeline": "stopWordFilter",
"search.placeholder": "Αναζήτηση",
"search.share": "Διαμοίραση",
"search.reset": "Καθαρισμός",
@@ -20,7 +20,7 @@
"nav": "Navigation",
"search": "Search",
"search.config.lang": "en",
"search.config.pipeline": "trimmer, stopWordFilter",
"search.config.pipeline": "stopWordFilter",
"search.config.separator": "[\\s\\-]+",
"search.placeholder": "Search",
"search.share": "Share",
@@ -14,7 +14,7 @@
"meta.source": "ソース",
"nav": "ナビゲーション",
"search.config.lang": "ja",
"search.config.pipeline": "trimmer, stemmer",
"search.config.pipeline": "stemmer",
"search.config.separator": "[\\s\\- 、。,.]+",
"search.placeholder": "検索",
"search.reset": "クリア",
@@ -11,7 +11,7 @@
"meta.comments": "評論",
"meta.source": "來源",
"search.config.lang": "ja",
"search.config.pipeline": "trimmer, stemmer",
"search.config.pipeline": "stemmer",
"search.config.separator": "[\\s\\-,。]+",
"search.placeholder": "搜尋",
"search.result.initializer": "正在初始化搜尋引擎",
@@ -15,7 +15,7 @@
"meta.comments": "留言",
"meta.source": "來源",
"search.config.lang": "ja",
"search.config.pipeline": "trimmer, stemmer",
"search.config.pipeline": "stemmer",
"search.config.separator": "[\\s\\- 、。,.?;]+",
"search.placeholder": "搜尋",
"search.result.initializer": "正在初始化搜尋引擎",
@@ -19,7 +19,7 @@
"nav": "导航栏",
"search": "查找",
"search.config.lang": "ja",
"search.config.pipeline": "trimmer, stemmer",
"search.config.pipeline": "stemmer",
"search.config.separator": "[\\s\\-,。]+",
"search.placeholder": "搜索",
"search.share": "分享",
0 material/plugins/offline/__init__.py Normal file
69 material/plugins/offline/plugin.py Normal file
@@ -0,0 +1,69 @@
# Copyright (c) 2016-2022 Martin Donath <martin.donath@squidfunk.com>

# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:

# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.

# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.

import os

from mkdocs import utils
from mkdocs.config import config_options as opt
from mkdocs.config.base import Config
from mkdocs.plugins import BasePlugin, event_priority

# -----------------------------------------------------------------------------
# Class
# -----------------------------------------------------------------------------

# Offline plugin configuration scheme
class OfflinePluginConfig(Config):
    enabled = opt.Type(bool, default = True)

# -----------------------------------------------------------------------------

# Offline plugin
class OfflinePlugin(BasePlugin[OfflinePluginConfig]):

    # Initialize plugin
    def on_config(self, config):
        if not self.config.enabled:
            return

        # Ensure correct resolution of links
        config.use_directory_urls = False

    # Support offline search (run latest)
    @event_priority(-100)
    def on_post_build(self, *, config):
        if not self.config.enabled:
            return

        # Check for existence of search index
        base = os.path.join(config.site_dir, "search")
        path = os.path.join(base, "search_index.json")
        if not os.path.exists(path):
            return

        # Retrieve search index
        with open(path, "r") as data:
            index = data.read()

        # Inline search index into script
        utils.write_file(
            f"var __index = {index}".encode("utf-8"),
            os.path.join(base, "search_index.js")
        )
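For reference (not part of the diff itself): wiring this plugin into a project would look roughly like the sketch below. The plugin name "offline" is an assumption for illustration, based on the "material/offline" entry point registered in pyproject.toml later in this commit; the plugin then forces use_directory_urls to false and inlines the search index at build time, as implemented above.

    # mkdocs.yml — minimal sketch, assuming the plugin is exposed as "offline"
    plugins:
      - search
      - offline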
@@ -1,4 +1,4 @@
# Copyright (c) 2016-2021 Martin Donath <martin.donath@squidfunk.com>
# Copyright (c) 2016-2022 Martin Donath <martin.donath@squidfunk.com>

# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
@ -18,54 +18,445 @@
|
||||
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import regex as re
|
||||
|
||||
from html import escape
|
||||
from html.parser import HTMLParser
|
||||
from mkdocs import utils
|
||||
from mkdocs.commands.build import DuplicateFilter
|
||||
from mkdocs.contrib.search import SearchPlugin as BasePlugin
|
||||
from mkdocs.contrib.search.search_index import SearchIndex as BaseIndex
|
||||
from mkdocs.config import config_options as opt
|
||||
from mkdocs.config.base import Config
|
||||
from mkdocs.contrib.search import LangOption
|
||||
from mkdocs.plugins import BasePlugin
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Class
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
# Search plugin with custom search index
|
||||
class SearchPlugin(BasePlugin):
|
||||
# Search plugin configuration scheme
|
||||
class SearchPluginConfig(Config):
|
||||
lang = opt.Optional(LangOption())
|
||||
separator = opt.Optional(opt.Type(str))
|
||||
pipeline = opt.ListOfItems(
|
||||
opt.Choice(("stemmer", "stopWordFilter", "trimmer")),
|
||||
default = []
|
||||
)
|
||||
|
||||
# Override to use a custom search index
|
||||
def on_pre_build(self, config):
|
||||
super().on_pre_build(config)
|
||||
# Deprecated options
|
||||
indexing = opt.Deprecated(message = "Unsupported option")
|
||||
prebuild_index = opt.Deprecated(message = "Unsupported option")
|
||||
min_search_length = opt.Deprecated(message = "Unsupported option")
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
# Search plugin
|
||||
class SearchPlugin(BasePlugin[SearchPluginConfig]):
|
||||
|
||||
# Determine whether we're running under dirty reload
|
||||
def on_startup(self, *, command, dirty):
|
||||
self.is_dirtyreload = False
|
||||
self.is_dirty = dirty
|
||||
|
||||
# Initialize search index cache
|
||||
self.search_index_prev = None
|
||||
|
||||
# Initialize plugin
|
||||
def on_config(self, config):
|
||||
if not self.config.lang:
|
||||
self.config.lang = [self._translate(
|
||||
config, "search.config.lang"
|
||||
)]
|
||||
|
||||
# Retrieve default value for separator
|
||||
if not self.config.separator:
|
||||
self.config.separator = self._translate(
|
||||
config, "search.config.separator"
|
||||
)
|
||||
|
||||
# Retrieve default value for pipeline
|
||||
if not self.config.pipeline:
|
||||
self.config.pipeline = list(filter(len, re.split(
|
||||
r"\s*,\s*", self._translate(config, "search.config.pipeline")
|
||||
)))
|
||||
|
||||
# Initialize search index
|
||||
self.search_index = SearchIndex(**self.config)
|
||||
|
||||
# Add page to search index
|
||||
def on_page_context(self, context, *, page, config, nav):
|
||||
self.search_index.add_entry_from_context(page)
|
||||
page.content = re.sub(
|
||||
r"\s?data-search-\w+=\"[^\"]+\"",
|
||||
"",
|
||||
page.content
|
||||
)
|
||||
|
||||
# Generate search index
|
||||
def on_post_build(self, *, config):
|
||||
base = os.path.join(config.site_dir, "search")
|
||||
path = os.path.join(base, "search_index.json")
|
||||
|
||||
# Generate and write search index to file
|
||||
data = self.search_index.generate_search_index(self.search_index_prev)
|
||||
utils.write_file(data.encode("utf-8"), path)
|
||||
|
||||
# Persist search index for repeated invocation
|
||||
if self.is_dirty:
|
||||
self.search_index_prev = self.search_index
|
||||
|
||||
# Determine whether we're running under dirty reload
|
||||
def on_serve(self, server, *, config, builder):
|
||||
self.is_dirtyreload = self.is_dirty
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
# Translate the given placeholder value
|
||||
def _translate(self, config, value):
|
||||
env = config.theme.get_env()
|
||||
|
||||
# Load language template and return translation for placeholder
|
||||
language = "partials/language.html"
|
||||
template = env.get_template(language, None, { "config": config })
|
||||
return template.module.t(value)
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
# Search index with support for additional fields
|
||||
class SearchIndex(BaseIndex):
|
||||
class SearchIndex:
|
||||
|
||||
# Override to add additional fields for each page
|
||||
# Initialize search index
|
||||
def __init__(self, **config):
|
||||
self.config = config
|
||||
self.entries = []
|
||||
|
||||
# Add page to search index
|
||||
def add_entry_from_context(self, page):
|
||||
index = len(self._entries)
|
||||
super().add_entry_from_context(page)
|
||||
search = page.meta.get("search", {})
|
||||
if search.get("exclude"):
|
||||
return
|
||||
|
||||
# Add document tags, if any
|
||||
if page.meta.get("tags"):
|
||||
if type(page.meta["tags"]) is list:
|
||||
entry = self._entries[index]
|
||||
entry["tags"] = [
|
||||
str(tag) for tag in page.meta["tags"]
|
||||
]
|
||||
# Divide page content into sections
|
||||
parser = Parser()
|
||||
parser.feed(page.content)
|
||||
parser.close()
|
||||
|
||||
# Add sections to index
|
||||
for section in parser.data:
|
||||
if not section.is_excluded():
|
||||
self.create_entry_for_section(section, page.toc, page.url, page)
|
||||
|
||||
# Override: graceful indexing and additional fields
|
||||
def create_entry_for_section(self, section, toc, url, page):
|
||||
item = self._find_toc_by_id(toc, section.id)
|
||||
if item:
|
||||
url = url + item.url
|
||||
elif section.id:
|
||||
url = url + "#" + section.id
|
||||
|
||||
# Set page title as section title if none was given, which happens when
|
||||
# the first headline in a Markdown document is not a h1 headline. Also,
|
||||
# if a page title was set via front matter, use that even though a h1
|
||||
# might be given or the page name was specified in nav in mkdocs.yml
|
||||
if not section.title:
|
||||
section.title = page.meta.get("title", page.title)
|
||||
|
||||
# Compute title and text
|
||||
title = "".join(section.title).strip()
|
||||
text = "".join(section.text).strip()
|
||||
|
||||
# Reset text, if only titles should be indexed
|
||||
if self.config["indexing"] == "titles":
|
||||
text = ""
|
||||
|
||||
# Create entry for section
|
||||
entry = {
|
||||
"title": title,
|
||||
"text": text,
|
||||
"location": url
|
||||
}
|
||||
|
||||
# Set document tags
|
||||
tags = page.meta.get("tags")
|
||||
if isinstance(tags, list):
|
||||
entry["tags"] = []
|
||||
for name in tags:
|
||||
if name and isinstance(name, (str, int, float, bool)):
|
||||
entry["tags"].append(name)
|
||||
|
||||
# Set document boost
|
||||
search = page.meta.get("search", {})
|
||||
if "boost" in search:
|
||||
entry["boost"] = search["boost"]
|
||||
|
||||
# Add entry to index
|
||||
self.entries.append(entry)
|
||||
|
||||
# Generate search index
|
||||
def generate_search_index(self, prev):
|
||||
config = {
|
||||
key: self.config[key]
|
||||
for key in ["lang", "separator", "pipeline"]
|
||||
}
|
||||
|
||||
# Hack: if we're running under dirty reload, the search index will only
|
||||
# include the entries for the current page. However, MkDocs > 1.4 allows
|
||||
# us to persist plugin state across rebuilds, which is exactly what we
|
||||
# do by passing the previously built index to this method. Thus, we just
|
||||
# remove the previous entries for the current page, and append the new
|
||||
# entries to the end of the index, as order doesn't matter.
|
||||
if prev and self.entries:
|
||||
path = self.entries[0]["location"]
|
||||
|
||||
# Since we're sure that we're running under dirty reload, the list
|
||||
# of entries will only contain sections for a single page. Thus, we
|
||||
# use the first entry to remove all entries from the previous run
|
||||
# that belong to the current page. The rationale behind this is that
|
||||
# authors might add or remove section headers, so we need to make
|
||||
# sure that sections are synchronized correctly.
|
||||
entries = [
|
||||
entry for entry in prev.entries
|
||||
if not entry["location"].startswith(path)
|
||||
]
|
||||
|
||||
# Merge previous with current entries
|
||||
self.entries = entries + self.entries
|
||||
|
||||
# Otherwise just set previous entries
|
||||
if prev and not self.entries:
|
||||
self.entries = prev.entries
|
||||
|
||||
# Return search index as JSON
|
||||
data = { "config": config, "docs": self.entries }
|
||||
return json.dumps(
|
||||
data,
|
||||
separators = (",", ":"),
|
||||
default = str
|
||||
)
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
# Retrieve item for anchor
|
||||
def _find_toc_by_id(self, toc, id):
|
||||
for toc_item in toc:
|
||||
if toc_item.id == id:
|
||||
return toc_item
|
||||
|
||||
# Recurse into children of item
|
||||
toc_item = self._find_toc_by_id(toc_item.children, id)
|
||||
if toc_item is not None:
|
||||
return toc_item
|
||||
|
||||
# No item found
|
||||
return None
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
# HTML element
|
||||
class Element:
|
||||
"""
|
||||
An element with attributes, essentially a small wrapper object for the
|
||||
parser to access attributes in other callbacks than handle_starttag.
|
||||
"""
|
||||
|
||||
# Initialize HTML element
|
||||
def __init__(self, tag, attrs = dict()):
|
||||
self.tag = tag
|
||||
self.attrs = attrs
|
||||
|
||||
# Support comparison (compare by tag only)
|
||||
def __eq__(self, other):
|
||||
if other is Element:
|
||||
return self.tag == other.tag
|
||||
else:
|
||||
return self.tag == other
|
||||
|
||||
# Support set operations
|
||||
def __hash__(self):
|
||||
return hash(self.tag)
|
||||
|
||||
# Check whether the element should be excluded
|
||||
def is_excluded(self):
|
||||
return "data-search-exclude" in self.attrs
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
# HTML section
|
||||
class Section:
|
||||
"""
|
||||
A block of text with markup, preceded by a title (with markup), i.e., a
|
||||
headline with a certain level (h1-h6). Internally used by the parser.
|
||||
"""
|
||||
|
||||
# Initialize HTML section
|
||||
def __init__(self, el):
|
||||
self.el = el
|
||||
self.text = []
|
||||
self.title = []
|
||||
self.id = None
|
||||
|
||||
# Check whether the section should be excluded
|
||||
def is_excluded(self):
|
||||
return self.el.is_excluded()
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
# HTML parser
|
||||
class Parser(HTMLParser):
|
||||
"""
|
||||
This parser divides the given string of HTML into a list of sections, each
|
||||
of which are preceded by a h1-h6 level heading. A white- and blacklist of
|
||||
tags dictates which tags should be preserved as part of the index, and
|
||||
which should be ignored in their entirety.
|
||||
"""
|
||||
|
||||
# Initialize HTML parser
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
# Tags to skip
|
||||
self.skip = set([
|
||||
"object", # Objects
|
||||
"script", # Scripts
|
||||
"style" # Styles
|
||||
])
|
||||
|
||||
# Tags to keep
|
||||
self.keep = set([
|
||||
"p", # Paragraphs
|
||||
"code", "pre", # Code blocks
|
||||
"li", "ol", "ul" # Lists
|
||||
])
|
||||
|
||||
# Current context and section
|
||||
self.context = []
|
||||
self.section = None
|
||||
|
||||
# All parsed sections
|
||||
self.data = []
|
||||
|
||||
# Called at the start of every HTML tag
|
||||
def handle_starttag(self, tag, attrs):
|
||||
attrs = dict(attrs)
|
||||
|
||||
# Ignore self-closing tags
|
||||
el = Element(tag, attrs)
|
||||
if not tag in void:
|
||||
self.context.append(el)
|
||||
else:
|
||||
return
|
||||
|
||||
# Handle headings
|
||||
if tag in ([f"h{x}" for x in range(1, 7)]):
|
||||
if "id" in attrs:
|
||||
|
||||
# Ensure top-level section
|
||||
if tag != "h1" and not self.data:
|
||||
self.section = Section(Element("hx"))
|
||||
self.data.append(self.section)
|
||||
|
||||
# Set identifier, if not first section
|
||||
self.section = Section(el)
|
||||
if self.data:
|
||||
self.section.id = attrs["id"]
|
||||
|
||||
# Append section to list
|
||||
self.data.append(self.section)
|
||||
|
||||
# Handle preface - ensure top-level section
|
||||
if not self.section:
|
||||
self.section = Section(Element("hx"))
|
||||
self.data.append(self.section)
|
||||
|
||||
# Handle special cases to skip
|
||||
for key, value in attrs.items():
|
||||
|
||||
# Skip block if explicitly excluded from search
|
||||
if key == "data-search-exclude":
|
||||
self.skip.add(el)
|
||||
return
|
||||
|
||||
# Skip line numbers - see https://bit.ly/3GvubZx
|
||||
if key == "class" and value == "linenodiv":
|
||||
self.skip.add(el)
|
||||
return
|
||||
|
||||
# Render opening tag if kept
|
||||
if not self.skip.intersection(self.context):
|
||||
if tag in self.keep:
|
||||
data = self.section.text
|
||||
if self.section.el in reversed(self.context):
|
||||
data = self.section.title
|
||||
|
||||
# Append to section title or text
|
||||
data.append(f"<{tag}>")
|
||||
|
||||
# Called at the end of every HTML tag
|
||||
def handle_endtag(self, tag):
|
||||
if not self.context or self.context[-1] != tag:
|
||||
return
|
||||
|
||||
# Remove element from skip list
|
||||
el = self.context.pop()
|
||||
if el in self.skip:
|
||||
self.skip.remove(el)
|
||||
return
|
||||
|
||||
# Render closing tag if kept
|
||||
if not self.skip.intersection(self.context):
|
||||
if tag in self.keep:
|
||||
data = self.section.text
|
||||
if self.section.el in reversed(self.context):
|
||||
data = self.section.title
|
||||
|
||||
# Remove element if empty (or only whitespace)
|
||||
prev, last = data[-2:]
|
||||
if last == f"<{tag}>":
|
||||
del data[len(data) - 1:]
|
||||
elif last.isspace() and prev == f"<{tag}>":
|
||||
del data[len(data) - 2:]
|
||||
|
||||
# Append to section title or text
|
||||
else:
|
||||
data.append(f"</{tag}>")
|
||||
|
||||
# Called for the text contents of each tag
|
||||
def handle_data(self, data):
|
||||
if self.skip.intersection(self.context):
|
||||
return
|
||||
|
||||
# Collapse whitespace in non-pre contexts
|
||||
if not "pre" in self.context:
|
||||
if not data.isspace():
|
||||
data = data.replace("\n", " ")
|
||||
else:
|
||||
log.warning(
|
||||
"Skipping 'tags' due to invalid syntax [%s]: %s",
|
||||
page.file.src_uri,
|
||||
page.meta["tags"]
|
||||
data = " "
|
||||
|
||||
# Handle preface - ensure top-level section
|
||||
if not self.section:
|
||||
self.section = Section(Element("hx"))
|
||||
self.data.append(self.section)
|
||||
|
||||
# Handle section headline
|
||||
if self.section.el in reversed(self.context):
|
||||
permalink = False
|
||||
for el in self.context:
|
||||
if el.tag == "a" and el.attrs.get("class") == "headerlink":
|
||||
permalink = True
|
||||
|
||||
# Ignore permalinks
|
||||
if not permalink:
|
||||
self.section.title.append(
|
||||
escape(data, quote = False)
|
||||
)
|
||||
|
||||
# Add document boost for search
|
||||
if "search" in page.meta:
|
||||
search = page.meta["search"]
|
||||
if "boost" in search:
|
||||
for entry in self._entries[index:]:
|
||||
entry["boost"] = search["boost"]
|
||||
# Handle everything else
|
||||
else:
|
||||
self.section.text.append(
|
||||
escape(data, quote = False)
|
||||
)
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Data
|
||||
@ -74,3 +465,21 @@ class SearchIndex(BaseIndex):
|
||||
# Set up logging
|
||||
log = logging.getLogger("mkdocs")
|
||||
log.addFilter(DuplicateFilter())
|
||||
|
||||
# Tags that are self-closing
|
||||
void = set([
|
||||
"area", # Image map areas
|
||||
"base", # Document base
|
||||
"br", # Line breaks
|
||||
"col", # Table columns
|
||||
"embed", # External content
|
||||
"hr", # Horizontal rules
|
||||
"img", # Images
|
||||
"input", # Input fields
|
||||
"link", # Links
|
||||
"meta", # Metadata
|
||||
"param", # External parameters
|
||||
"source", # Image source sets
|
||||
"track", # Text track
|
||||
"wbr" # Line break opportunities
|
||||
])
|
||||
|
@@ -80,7 +80,7 @@ class SocialPlugin(BasePlugin[SocialPluginConfig]):
"Required dependencies of \"social\" plugin not found. "
"Install with: pip install pillow cairosvg"
)
sys.exit()
sys.exit(1)

# Check if site URL is defined
if not config.site_url:
@@ -92,7 +92,7 @@ class TagsPlugin(BasePlugin[TagsPluginConfig]):
file = files.get_file_from_path(path)
if not file:
log.error(f"Tags file '{path}' does not exist.")
sys.exit()
sys.exit(1)

# Add tags file to files
files.append(file)
@@ -81,7 +81,8 @@ theme:

# Plugins
plugins:
- search
- search:
separator: '[\s\u200b,:!=\[\]()"`/]+|\.(?!\d)|&[lg]t;|(?!\b)(?=[A-Z][a-z])'
- redirects:
redirect_maps:
changelog/insiders.md: insiders/changelog.md
14 package-lock.json generated
@@ -14,6 +14,7 @@
"escape-html": "^1.0.3",
"focus-visible": "^5.2.0",
"fuzzaldrin-plus": "^0.6.0",
"iframe-worker": "^1.0.0",
"lunr": "^2.3.9",
"lunr-languages": "^1.10.0",
"resize-observer-polyfill": "^1.5.1",
@@ -5733,6 +5734,14 @@
"node": ">=0.10.0"
}
},
"node_modules/iframe-worker": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/iframe-worker/-/iframe-worker-1.0.0.tgz",
"integrity": "sha512-kZcAynPvvsaMUh7nj89dCi6dmyjwgX6mlg3y28IUF1gdQpPX44+l0MP+4UFChfQmCdMy01EPkJ+joNuXOh0eWQ==",
"engines": {
"node": ">= 16"
}
},
"node_modules/ignore": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.1.tgz",
@@ -17764,6 +17773,11 @@
"safer-buffer": ">= 2.1.2 < 3.0.0"
}
},
"iframe-worker": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/iframe-worker/-/iframe-worker-1.0.0.tgz",
"integrity": "sha512-kZcAynPvvsaMUh7nj89dCi6dmyjwgX6mlg3y28IUF1gdQpPX44+l0MP+4UFChfQmCdMy01EPkJ+joNuXOh0eWQ=="
},
"ignore": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.1.tgz",
@@ -44,6 +44,7 @@
"escape-html": "^1.0.3",
"focus-visible": "^5.2.0",
"fuzzaldrin-plus": "^0.6.0",
"iframe-worker": "^1.0.0",
"lunr": "^2.3.9",
"lunr-languages": "^1.10.0",
"resize-observer-polyfill": "^1.5.1",
@@ -51,6 +51,7 @@ classifiers = [
]

[project.entry-points."mkdocs.plugins"]
"material/offline" = "material.plugins.offline.plugin:OfflinePlugin"
"material/search" = "material.plugins.search.plugin:SearchPlugin"
"material/social" = "material.plugins.social.plugin:SocialPlugin"
"material/tags" = "material.plugins.tags.plugin:TagsPlugin"
@@ -24,7 +24,8 @@ markdown>=3.2
mkdocs>=1.4.2
mkdocs-material-extensions>=1.1
pygments>=2.12
pymdown-extensions>=9.4
pymdown-extensions>=9.6

# Requirements for plugins
regex>=2022.4.24
requests>=2.26
@@ -58,10 +58,6 @@ export type Flag =
export type Translation =
| "clipboard.copy" /* Copy to clipboard */
| "clipboard.copied" /* Copied to clipboard */
| "search.config.lang" /* Search language */
| "search.config.pipeline" /* Search pipeline */
| "search.config.separator" /* Search separator */
| "search.placeholder" /* Search */
| "search.result.placeholder" /* Type to start searching */
| "search.result.none" /* No matching documents */
| "search.result.one" /* 1 matching document */
@@ -74,7 +70,8 @@ export type Translation =
/**
* Translations
*/
export type Translations = Record<Translation, string>
export type Translations =
Record<Translation, string>

/* ------------------------------------------------------------------------- */
@@ -55,7 +55,7 @@ export function getElementContainer(
let parent = el.parentElement
while (parent)
if (
el.scrollWidth <= parent.scrollWidth &&
el.scrollWidth <= parent.scrollWidth &&
el.scrollHeight <= parent.scrollHeight
)
parent = (el = parent).parentElement
@ -21,11 +21,15 @@
|
||||
*/
|
||||
|
||||
import {
|
||||
EMPTY,
|
||||
Observable,
|
||||
filter,
|
||||
fromEvent,
|
||||
map,
|
||||
share
|
||||
merge,
|
||||
share,
|
||||
startWith,
|
||||
switchMap
|
||||
} from "rxjs"
|
||||
|
||||
import { getActiveElement } from "../element"
|
||||
@ -93,13 +97,28 @@ function isSusceptibleToKeyboard(
|
||||
* Functions
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Watch composition events
|
||||
*
|
||||
* @returns Composition observable
|
||||
*/
|
||||
export function watchComposition(): Observable<boolean> {
|
||||
return merge(
|
||||
fromEvent(window, "compositionstart").pipe(map(() => true)),
|
||||
fromEvent(window, "compositionend").pipe(map(() => false))
|
||||
)
|
||||
.pipe(
|
||||
startWith(false)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Watch keyboard
|
||||
*
|
||||
* @returns Keyboard observable
|
||||
*/
|
||||
export function watchKeyboard(): Observable<Keyboard> {
|
||||
return fromEvent<KeyboardEvent>(window, "keydown")
|
||||
const keyboard$ = fromEvent<KeyboardEvent>(window, "keydown")
|
||||
.pipe(
|
||||
filter(ev => !(ev.metaKey || ev.ctrlKey)),
|
||||
map(ev => ({
|
||||
@ -120,4 +139,10 @@ export function watchKeyboard(): Observable<Keyboard> {
|
||||
}),
|
||||
share()
|
||||
)
|
||||
|
||||
/* Don't emit during composition events - see https://bit.ly/3te3Wl8 */
|
||||
return watchComposition()
|
||||
.pipe(
|
||||
switchMap(active => !active ? keyboard$ : EMPTY)
|
||||
)
|
||||
}
|
||||
|
@@ -60,6 +60,8 @@ export function request(
)
}

/* ------------------------------------------------------------------------- */

/**
* Fetch JSON from the given URL
*
@@ -42,7 +42,7 @@ import { h } from "~/utilities"
* Create and load a `script` element
*
* This function returns an observable that will emit when the script was
* successfully loaded, or throw an error if it didn't.
* successfully loaded, or throw an error if it wasn't.
*
* @param src - Script URL
*
@ -20,15 +20,16 @@
|
||||
* IN THE SOFTWARE.
|
||||
*/
|
||||
|
||||
import "iframe-worker/shim"
|
||||
import {
|
||||
Observable,
|
||||
Subject,
|
||||
endWith,
|
||||
fromEvent,
|
||||
map,
|
||||
ignoreElements,
|
||||
mergeWith,
|
||||
share,
|
||||
switchMap,
|
||||
tap,
|
||||
throttle
|
||||
takeUntil
|
||||
} from "rxjs"
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
@ -43,29 +44,38 @@ export interface WorkerMessage {
|
||||
data?: unknown /* Message data */
|
||||
}
|
||||
|
||||
/**
|
||||
* Worker handler
|
||||
*
|
||||
* @template T - Message type
|
||||
*/
|
||||
export interface WorkerHandler<
|
||||
T extends WorkerMessage
|
||||
> {
|
||||
tx$: Subject<T> /* Message transmission subject */
|
||||
rx$: Observable<T> /* Message receive observable */
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Helper types
|
||||
* Helper functions
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Watch options
|
||||
* Create an observable for receiving from a web worker
|
||||
*
|
||||
* @template T - Worker message type
|
||||
* @template T - Data type
|
||||
*
|
||||
* @param worker - Web worker
|
||||
*
|
||||
* @returns Message observable
|
||||
*/
|
||||
interface WatchOptions<T extends WorkerMessage> {
|
||||
tx$: Observable<T> /* Message transmission observable */
|
||||
function recv<T>(worker: Worker): Observable<T> {
|
||||
return fromEvent<MessageEvent<T>, T>(worker, "message", ev => ev.data)
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a subject for sending to a web worker
|
||||
*
|
||||
* @template T - Data type
|
||||
*
|
||||
* @param worker - Web worker
|
||||
*
|
||||
* @returns Message subject
|
||||
*/
|
||||
function send<T>(worker: Worker): Subject<T> {
|
||||
const send$ = new Subject<T>()
|
||||
send$.subscribe(data => worker.postMessage(data))
|
||||
|
||||
/* Return message subject */
|
||||
return send$
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
@ -73,34 +83,31 @@ interface WatchOptions<T extends WorkerMessage> {
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Watch a web worker
|
||||
* Create a bidirectional communication channel to a web worker
|
||||
*
|
||||
* This function returns an observable that sends all values emitted by the
|
||||
* message observable to the web worker. Web worker communication is expected
|
||||
* to be bidirectional (request-response) and synchronous. Messages that are
|
||||
* emitted during a pending request are throttled, the last one is emitted.
|
||||
* @template T - Data type
|
||||
*
|
||||
* @param worker - Web worker
|
||||
* @param options - Options
|
||||
* @param url - Worker URL
|
||||
* @param worker - Worker
|
||||
*
|
||||
* @returns Worker message observable
|
||||
* @returns Worker subject
|
||||
*/
|
||||
export function watchWorker<T extends WorkerMessage>(
|
||||
worker: Worker, { tx$ }: WatchOptions<T>
|
||||
): Observable<T> {
|
||||
url: string, worker = new Worker(url)
|
||||
): Subject<T> {
|
||||
const recv$ = recv<T>(worker)
|
||||
const send$ = send<T>(worker)
|
||||
|
||||
/* Intercept messages from worker-like objects */
|
||||
const rx$ = fromEvent<MessageEvent>(worker, "message")
|
||||
.pipe(
|
||||
map(({ data }) => data as T)
|
||||
)
|
||||
/* Create worker subject and forward messages */
|
||||
const worker$ = new Subject<T>()
|
||||
worker$.subscribe(send$)
|
||||
|
||||
/* Send and receive messages, return hot observable */
|
||||
return tx$
|
||||
/* Return worker subject */
|
||||
const done$ = send$.pipe(ignoreElements(), endWith(true))
|
||||
return worker$
|
||||
.pipe(
|
||||
throttle(() => rx$, { leading: true, trailing: true }),
|
||||
tap(message => worker.postMessage(message)),
|
||||
switchMap(() => rx$),
|
||||
ignoreElements(),
|
||||
mergeWith(recv$.pipe(takeUntil(done$))),
|
||||
share()
|
||||
)
|
||||
) as Subject<T>
|
||||
}
|
||||
|
@ -28,6 +28,7 @@ import "url-polyfill"
|
||||
import {
|
||||
EMPTY,
|
||||
NEVER,
|
||||
Observable,
|
||||
Subject,
|
||||
defer,
|
||||
delay,
|
||||
@ -51,6 +52,7 @@ import {
|
||||
watchLocationTarget,
|
||||
watchMedia,
|
||||
watchPrint,
|
||||
watchScript,
|
||||
watchViewport
|
||||
} from "./browser"
|
||||
import {
|
||||
@ -86,6 +88,32 @@ import {
|
||||
} from "./patches"
|
||||
import "./polyfills"
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Functions - @todo refactor
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Fetch search index
|
||||
*
|
||||
* @returns Search index observable
|
||||
*/
|
||||
function fetchSearchIndex(): Observable<SearchIndex> {
|
||||
if (location.protocol === "file:") {
|
||||
return watchScript(
|
||||
`${new URL("search/search_index.js", config.base)}`
|
||||
)
|
||||
.pipe(
|
||||
// @ts-ignore - @todo fix typings
|
||||
map(() => __index),
|
||||
shareReplay(1)
|
||||
)
|
||||
} else {
|
||||
return requestJSON<SearchIndex>(
|
||||
new URL("search/search_index.json", config.base)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Application
|
||||
* ------------------------------------------------------------------------- */
|
||||
@ -109,9 +137,7 @@ const print$ = watchPrint()
|
||||
/* Retrieve search index, if search is enabled */
|
||||
const config = configuration()
|
||||
const index$ = document.forms.namedItem("search")
|
||||
? __search?.index || requestJSON<SearchIndex>(
|
||||
new URL("search/search_index.json", config.base)
|
||||
)
|
||||
? fetchSearchIndex()
|
||||
: NEVER
|
||||
|
||||
/* Set up Clipboard.js integration */
|
||||
|
@@ -29,14 +29,15 @@ import {
debounceTime,
defer,
delay,
endWith,
filter,
finalize,
fromEvent,
ignoreElements,
map,
merge,
switchMap,
take,
takeLast,
takeUntil,
tap,
throttleTime,
@@ -136,7 +137,7 @@ export function mountAnnotation(
/* Mount component on subscription */
return defer(() => {
const push$ = new Subject<Annotation>()
const done$ = push$.pipe(takeLast(1))
const done$ = push$.pipe(ignoreElements(), endWith(true))
push$.subscribe({

/* Handle emission */
@@ -25,10 +25,11 @@ import {
Observable,
Subject,
defer,
endWith,
finalize,
ignoreElements,
merge,
share,
takeLast,
takeUntil
} from "rxjs"

@@ -167,7 +168,7 @@ export function mountAnnotationList(
/* Handle print mode - see https://bit.ly/3rgPdpt */
print$
.pipe(
takeUntil(done$.pipe(takeLast(1)))
takeUntil(done$.pipe(ignoreElements(), endWith(true)))
)
.subscribe(active => {
el.hidden = !active
@@ -28,8 +28,10 @@ import {
auditTime,
combineLatest,
defer,
endWith,
finalize,
fromEvent,
ignoreElements,
map,
merge,
skip,
@@ -135,7 +137,7 @@ export function mountContentTabs(
const container = getElement(".tabbed-labels", el)
return defer(() => {
const push$ = new Subject<ContentTabs>()
const done$ = push$.pipe(takeLast(1))
const done$ = push$.pipe(ignoreElements(), endWith(true))
combineLatest([push$, watchElementSize(el)])
.pipe(
auditTime(1, animationFrameScheduler),
@@ -29,13 +29,14 @@ import {
defer,
distinctUntilChanged,
distinctUntilKeyChanged,
endWith,
filter,
ignoreElements,
map,
of,
shareReplay,
startWith,
switchMap,
takeLast,
takeUntil
} from "rxjs"

@@ -175,7 +176,7 @@ export function mountHeader(
): Observable<Component<Header>> {
return defer(() => {
const push$ = new Subject<Main>()
const done$ = push$.pipe(takeLast(1))
const done$ = push$.pipe(ignoreElements(), endWith(true))
push$
.pipe(
distinctUntilKeyChanged("active"),
@ -26,9 +26,7 @@ import {
|
||||
ObservableInput,
|
||||
filter,
|
||||
merge,
|
||||
mergeWith,
|
||||
sample,
|
||||
take
|
||||
mergeWith
|
||||
} from "rxjs"
|
||||
|
||||
import { configuration } from "~/_"
|
||||
@ -41,8 +39,6 @@ import {
|
||||
import {
|
||||
SearchIndex,
|
||||
SearchResult,
|
||||
isSearchQueryMessage,
|
||||
isSearchReadyMessage,
|
||||
setupSearchWorker
|
||||
} from "~/integrations"
|
||||
|
||||
@ -110,23 +106,12 @@ export function mountSearch(
|
||||
): Observable<Component<Search>> {
|
||||
const config = configuration()
|
||||
try {
|
||||
const url = __search?.worker || config.search
|
||||
const worker = setupSearchWorker(url, index$)
|
||||
const worker$ = setupSearchWorker(config.search, index$)
|
||||
|
||||
/* Retrieve query and result components */
|
||||
const query = getComponentElement("search-query", el)
|
||||
const result = getComponentElement("search-result", el)
|
||||
|
||||
/* Re-emit query when search is ready */
|
||||
const { tx$, rx$ } = worker
|
||||
tx$
|
||||
.pipe(
|
||||
filter(isSearchQueryMessage),
|
||||
sample(rx$.pipe(filter(isSearchReadyMessage))),
|
||||
take(1)
|
||||
)
|
||||
.subscribe(tx$.next.bind(tx$))
|
||||
|
||||
/* Set up search keyboard handlers */
|
||||
keyboard$
|
||||
.pipe(
|
||||
@ -199,7 +184,7 @@ export function mountSearch(
|
||||
/* Set up global keyboard handlers */
|
||||
keyboard$
|
||||
.pipe(
|
||||
filter(({ mode }) => mode === "global"),
|
||||
filter(({ mode }) => mode === "global")
|
||||
)
|
||||
.subscribe(key => {
|
||||
switch (key.type) {
|
||||
@ -218,9 +203,11 @@ export function mountSearch(
|
||||
})
|
||||
|
||||
/* Create and return component */
|
||||
const query$ = mountSearchQuery(query, worker)
|
||||
const result$ = mountSearchResult(result, worker, { query$ })
|
||||
return merge(query$, result$)
|
||||
const query$ = mountSearchQuery(query, { worker$ })
|
||||
return merge(
|
||||
query$,
|
||||
mountSearchResult(result, { worker$, query$ })
|
||||
)
|
||||
.pipe(
|
||||
mergeWith(
|
||||
|
||||
@ -230,7 +217,7 @@ export function mountSearch(
|
||||
|
||||
/* Search suggestions */
|
||||
...getComponentElements("search-suggest", el)
|
||||
.map(child => mountSearchSuggest(child, worker, { keyboard$ }))
|
||||
.map(child => mountSearchSuggest(child, { worker$, keyboard$ }))
|
||||
)
|
||||
)
|
||||
|
||||
|
@@ -85,7 +85,7 @@ export function mountSearchHiglight(
)
])
.pipe(
map(([index, url]) => setupSearchHighlighter(index.config, true)(
map(([index, url]) => setupSearchHighlighter(index.config)(
url.searchParams.get("h")!
)),
map(fn => {
@ -24,24 +24,20 @@ import {
|
||||
Observable,
|
||||
Subject,
|
||||
combineLatest,
|
||||
delay,
|
||||
distinctUntilChanged,
|
||||
distinctUntilKeyChanged,
|
||||
filter,
|
||||
endWith,
|
||||
finalize,
|
||||
first,
|
||||
fromEvent,
|
||||
ignoreElements,
|
||||
map,
|
||||
merge,
|
||||
share,
|
||||
shareReplay,
|
||||
startWith,
|
||||
take,
|
||||
takeLast,
|
||||
takeUntil,
|
||||
tap
|
||||
} from "rxjs"
|
||||
|
||||
import { translation } from "~/_"
|
||||
import {
|
||||
getLocation,
|
||||
setToggle,
|
||||
@ -49,10 +45,8 @@ import {
|
||||
watchToggle
|
||||
} from "~/browser"
|
||||
import {
|
||||
SearchMessage,
|
||||
SearchMessageType,
|
||||
SearchQueryMessage,
|
||||
SearchWorker,
|
||||
defaultTransform,
|
||||
isSearchReadyMessage
|
||||
} from "~/integrations"
|
||||
|
||||
@ -70,6 +64,24 @@ export interface SearchQuery {
|
||||
focus: boolean /* Query focus */
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Helper types
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Watch options
|
||||
*/
|
||||
interface WatchOptions {
|
||||
worker$: Subject<SearchMessage> /* Search worker */
|
||||
}
|
||||
|
||||
/**
|
||||
* Mount options
|
||||
*/
|
||||
interface MountOptions {
|
||||
worker$: Subject<SearchMessage> /* Search worker */
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Functions
|
||||
* ------------------------------------------------------------------------- */
|
||||
@ -81,59 +93,45 @@ export interface SearchQuery {
|
||||
* is delayed by `1ms` so the input's empty state is allowed to propagate.
|
||||
*
|
||||
* @param el - Search query element
|
||||
* @param worker - Search worker
|
||||
* @param options - Options
|
||||
*
|
||||
* @returns Search query observable
|
||||
*/
|
||||
export function watchSearchQuery(
|
||||
el: HTMLInputElement, { rx$ }: SearchWorker
|
||||
el: HTMLInputElement, { worker$ }: WatchOptions
|
||||
): Observable<SearchQuery> {
|
||||
const fn = __search?.transform || defaultTransform
|
||||
|
||||
/* Immediately show search dialog */
|
||||
/* Support search deep linking */
|
||||
const { searchParams } = getLocation()
|
||||
if (searchParams.has("q"))
|
||||
if (searchParams.has("q")) {
|
||||
setToggle("search", true)
|
||||
|
||||
/* Intercept query parameter (deep link) */
|
||||
const param$ = rx$
|
||||
.pipe(
|
||||
filter(isSearchReadyMessage),
|
||||
take(1),
|
||||
map(() => searchParams.get("q") || "")
|
||||
)
|
||||
/* Set query from parameter */
|
||||
el.value = searchParams.get("q")!
|
||||
el.focus()
|
||||
|
||||
/* Remove query parameter when search is closed */
|
||||
watchToggle("search")
|
||||
.pipe(
|
||||
filter(active => !active),
|
||||
take(1)
|
||||
)
|
||||
.subscribe(() => {
|
||||
const url = new URL(location.href)
|
||||
url.searchParams.delete("q")
|
||||
history.replaceState({}, "", `${url}`)
|
||||
})
|
||||
|
||||
/* Set query from parameter */
|
||||
param$.subscribe(value => { // TODO: not ideal - find a better way
|
||||
if (value) {
|
||||
el.value = value
|
||||
el.focus()
|
||||
}
|
||||
})
|
||||
/* Remove query parameter on close */
|
||||
watchToggle("search")
|
||||
.pipe(
|
||||
first(active => !active)
|
||||
)
|
||||
.subscribe(() => {
|
||||
const url = new URL(location.href)
|
||||
url.searchParams.delete("q")
|
||||
history.replaceState({}, "", `${url}`)
|
||||
})
|
||||
}
|
||||
|
||||
/* Intercept focus and input events */
|
||||
const focus$ = watchElementFocus(el)
|
||||
const value$ = merge(
|
||||
worker$.pipe(first(isSearchReadyMessage)),
|
||||
fromEvent(el, "keyup"),
|
||||
fromEvent(el, "focus").pipe(delay(1)),
|
||||
param$
|
||||
focus$
|
||||
)
|
||||
.pipe(
|
||||
map(() => fn(el.value)),
|
||||
startWith(""),
|
||||
distinctUntilChanged(),
|
||||
map(() => el.value),
|
||||
distinctUntilChanged()
|
||||
)
|
||||
|
||||
/* Combine into single observable */
|
||||
@ -148,39 +146,37 @@ export function watchSearchQuery(
|
||||
* Mount search query
|
||||
*
|
||||
* @param el - Search query element
|
||||
* @param worker - Search worker
|
||||
* @param options - Options
|
||||
*
|
||||
* @returns Search query component observable
|
||||
*/
|
||||
export function mountSearchQuery(
|
||||
el: HTMLInputElement, { tx$, rx$ }: SearchWorker
|
||||
el: HTMLInputElement, { worker$ }: MountOptions
|
||||
): Observable<Component<SearchQuery, HTMLInputElement>> {
|
||||
const push$ = new Subject<SearchQuery>()
|
||||
const done$ = push$.pipe(takeLast(1))
|
||||
const done$ = push$.pipe(ignoreElements(), endWith(true))
|
||||
|
||||
/* Handle value changes */
|
||||
push$
|
||||
/* Handle value change */
|
||||
combineLatest([
|
||||
worker$.pipe(first(isSearchReadyMessage)),
|
||||
push$
|
||||
], (_, query) => query)
|
||||
.pipe(
|
||||
distinctUntilKeyChanged("value"),
|
||||
map(({ value }): SearchQueryMessage => ({
|
||||
distinctUntilKeyChanged("value")
|
||||
)
|
||||
.subscribe(({ value }) => worker$.next({
|
||||
type: SearchMessageType.QUERY,
|
||||
data: value
|
||||
}))
|
||||
)
|
||||
.subscribe(tx$.next.bind(tx$))
|
||||
|
||||
/* Handle focus changes */
|
||||
/* Handle focus change */
|
||||
push$
|
||||
.pipe(
|
||||
distinctUntilKeyChanged("focus")
|
||||
)
|
||||
.subscribe(({ focus }) => {
|
||||
if (focus) {
|
||||
if (focus)
|
||||
setToggle("search", focus)
|
||||
el.placeholder = ""
|
||||
} else {
|
||||
el.placeholder = translation("search.placeholder")
|
||||
}
|
||||
})
|
||||
|
||||
/* Handle reset */
|
||||
@ -191,11 +187,11 @@ export function mountSearchQuery(
|
||||
.subscribe(() => el.focus())
|
||||
|
||||
/* Create and return component */
|
||||
return watchSearchQuery(el, { tx$, rx$ })
|
||||
return watchSearchQuery(el, { worker$ })
|
||||
.pipe(
|
||||
tap(state => push$.next(state)),
|
||||
finalize(() => push$.complete()),
|
||||
map(state => ({ ref: el, ...state })),
|
||||
share()
|
||||
shareReplay(1)
|
||||
)
|
||||
}
|
||||
|
@ -21,17 +21,22 @@
|
||||
*/
|
||||
|
||||
import {
|
||||
EMPTY,
|
||||
Observable,
|
||||
Subject,
|
||||
bufferCount,
|
||||
filter,
|
||||
finalize,
|
||||
first,
|
||||
fromEvent,
|
||||
map,
|
||||
merge,
|
||||
mergeMap,
|
||||
of,
|
||||
share,
|
||||
skipUntil,
|
||||
switchMap,
|
||||
take,
|
||||
takeUntil,
|
||||
tap,
|
||||
withLatestFrom,
|
||||
zipWith
|
||||
@ -40,11 +45,12 @@ import {
|
||||
import { translation } from "~/_"
|
||||
import {
|
||||
getElement,
|
||||
getOptionalElement,
|
||||
watchElementBoundary
|
||||
} from "~/browser"
|
||||
import {
|
||||
SearchMessage,
|
||||
SearchResult,
|
||||
SearchWorker,
|
||||
isSearchReadyMessage,
|
||||
isSearchResultMessage
|
||||
} from "~/integrations"
|
||||
@ -63,6 +69,7 @@ import { SearchQuery } from "../query"
|
||||
*/
|
||||
interface MountOptions {
|
||||
query$: Observable<SearchQuery> /* Search query observable */
|
||||
worker$: Subject<SearchMessage> /* Search worker */
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
@ -76,13 +83,12 @@ interface MountOptions {
|
||||
* the vertical offset of the search result container.
|
||||
*
|
||||
* @param el - Search result list element
|
||||
* @param worker - Search worker
|
||||
* @param options - Options
|
||||
*
|
||||
* @returns Search result list component observable
|
||||
*/
|
||||
export function mountSearchResult(
|
||||
el: HTMLElement, { rx$ }: SearchWorker, { query$ }: MountOptions
|
||||
el: HTMLElement, { worker$, query$ }: MountOptions
|
||||
): Observable<Component<SearchResult>> {
|
||||
const push$ = new Subject<SearchResult>()
|
||||
const boundary$ = watchElementBoundary(el.parentElement!)
|
||||
@ -90,51 +96,43 @@ export function mountSearchResult(
|
||||
filter(Boolean)
|
||||
)
|
||||
|
||||
/* Retrieve container */
|
||||
const container = el.parentElement!
|
||||
|
||||
/* Retrieve nested components */
|
||||
const meta = getElement(":scope > :first-child", el)
|
||||
const list = getElement(":scope > :last-child", el)
|
||||
|
||||
/* Wait until search is ready */
|
||||
const ready$ = rx$
|
||||
.pipe(
|
||||
filter(isSearchReadyMessage),
|
||||
take(1)
|
||||
)
|
||||
|
||||
/* Update search result metadata */
|
||||
push$
|
||||
.pipe(
|
||||
withLatestFrom(query$),
|
||||
skipUntil(ready$)
|
||||
skipUntil(worker$.pipe(first(isSearchReadyMessage)))
|
||||
)
|
||||
.subscribe(([{ items }, { value }]) => {
|
||||
if (value) {
|
||||
switch (items.length) {
|
||||
switch (items.length) {
|
||||
|
||||
/* No results */
|
||||
case 0:
|
||||
meta.textContent = translation("search.result.none")
|
||||
break
|
||||
/* No results */
|
||||
case 0:
|
||||
meta.textContent = value.length
|
||||
? translation("search.result.none")
|
||||
: translation("search.result.placeholder")
|
||||
break
|
||||
|
||||
/* One result */
|
||||
case 1:
|
||||
meta.textContent = translation("search.result.one")
|
||||
break
|
||||
/* One result */
|
||||
case 1:
|
||||
meta.textContent = translation("search.result.one")
|
||||
break
|
||||
|
||||
/* Multiple result */
|
||||
default:
|
||||
meta.textContent = translation(
|
||||
"search.result.other",
|
||||
round(items.length)
|
||||
)
|
||||
}
|
||||
} else {
|
||||
meta.textContent = translation("search.result.placeholder")
|
||||
/* Multiple result */
|
||||
default:
|
||||
const count = round(items.length)
|
||||
meta.textContent = translation("search.result.other", count)
|
||||
}
|
||||
})
|
||||
|
||||
/* Update search result list */
|
||||
push$
|
||||
/* Render search result item */
|
||||
const render$ = push$
|
||||
.pipe(
|
||||
tap(() => list.innerHTML = ""),
|
||||
switchMap(({ items }) => merge(
|
||||
@ -145,14 +143,38 @@ export function mountSearchResult(
|
||||
zipWith(boundary$),
|
||||
switchMap(([chunk]) => chunk)
|
||||
)
|
||||
))
|
||||
)),
|
||||
map(renderSearchResultItem),
|
||||
share()
|
||||
)
|
||||
.subscribe(result => list.appendChild(
|
||||
renderSearchResultItem(result)
|
||||
))
|
||||
|
||||
/* Update search result list */
|
||||
render$.subscribe(item => list.appendChild(item))
|
||||
render$
|
||||
.pipe(
|
||||
mergeMap(item => {
|
||||
const details = getOptionalElement("details", item)
|
||||
if (typeof details === "undefined")
|
||||
return EMPTY
|
||||
|
||||
/* Keep position of details element stable */
|
||||
return fromEvent(details, "toggle")
|
||||
.pipe(
|
||||
takeUntil(push$),
|
||||
map(() => details)
|
||||
)
|
||||
})
|
||||
)
|
||||
.subscribe(details => {
|
||||
if (
|
||||
details.open === false &&
|
||||
details.offsetTop <= container.scrollTop
|
||||
)
|
||||
container.scrollTo({ top: details.offsetTop })
|
||||
})
|
||||
|
||||
/* Filter search result message */
|
||||
const result$ = rx$
|
||||
const result$ = worker$
|
||||
.pipe(
|
||||
filter(isSearchResultMessage),
|
||||
map(({ data }) => data)
|
||||
|
@ -23,9 +23,12 @@
|
||||
import {
|
||||
Observable,
|
||||
Subject,
|
||||
endWith,
|
||||
finalize,
|
||||
fromEvent,
|
||||
ignoreElements,
|
||||
map,
|
||||
takeUntil,
|
||||
tap
|
||||
} from "rxjs"
|
||||
|
||||
@ -102,6 +105,7 @@ export function mountSearchShare(
|
||||
el: HTMLAnchorElement, options: MountOptions
|
||||
): Observable<Component<SearchShare>> {
|
||||
const push$ = new Subject<SearchShare>()
|
||||
const done$ = push$.pipe(ignoreElements(), endWith(true))
|
||||
push$.subscribe(({ url }) => {
|
||||
el.setAttribute("data-clipboard-text", el.href)
|
||||
el.href = `${url}`
|
||||
@ -109,7 +113,10 @@ export function mountSearchShare(
|
||||
|
||||
/* Prevent following of link */
|
||||
fromEvent(el, "click")
|
||||
.subscribe(ev => ev.preventDefault())
|
||||
.pipe(
|
||||
takeUntil(done$)
|
||||
)
|
||||
.subscribe(ev => ev.preventDefault())
|
||||
|
||||
/* Create and return component */
|
||||
return watchSearchShare(el, options)
|
||||
|
@ -37,8 +37,8 @@ import {
|
||||
|
||||
import { Keyboard } from "~/browser"
|
||||
import {
|
||||
SearchMessage,
|
||||
SearchResult,
|
||||
SearchWorker,
|
||||
isSearchResultMessage
|
||||
} from "~/integrations"
|
||||
|
||||
@ -62,6 +62,7 @@ export interface SearchSuggest {}
|
||||
*/
|
||||
interface MountOptions {
|
||||
keyboard$: Observable<Keyboard> /* Keyboard observable */
|
||||
worker$: Subject<SearchMessage> /* Search worker */
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
@ -75,13 +76,12 @@ interface MountOptions {
|
||||
* on the vertical offset of the search result container.
|
||||
*
|
||||
* @param el - Search result list element
|
||||
* @param worker - Search worker
|
||||
* @param options - Options
|
||||
*
|
||||
* @returns Search result list component observable
|
||||
*/
|
||||
export function mountSearchSuggest(
|
||||
el: HTMLElement, { rx$ }: SearchWorker, { keyboard$ }: MountOptions
|
||||
el: HTMLElement, { worker$, keyboard$ }: MountOptions
|
||||
): Observable<Component<SearchSuggest>> {
|
||||
const push$ = new Subject<SearchResult>()
|
||||
|
||||
@ -101,10 +101,10 @@ export function mountSearchSuggest(
|
||||
push$
|
||||
.pipe(
|
||||
combineLatestWith(query$),
|
||||
map(([{ suggestions }, value]) => {
|
||||
map(([{ suggest }, value]) => {
|
||||
const words = value.split(/([\s-]+)/)
|
||||
if (suggestions?.length && words[words.length - 1]) {
|
||||
const last = suggestions[suggestions.length - 1]
|
||||
if (suggest?.length && words[words.length - 1]) {
|
||||
const last = suggest[suggest.length - 1]
|
||||
if (last.startsWith(words[words.length - 1]))
|
||||
words[words.length - 1] = last
|
||||
} else {
|
||||
@ -138,7 +138,7 @@ export function mountSearchSuggest(
|
||||
})
|
||||
|
||||
/* Filter search result message */
|
||||
const result$ = rx$
|
||||
const result$ = worker$
|
||||
.pipe(
|
||||
filter(isSearchResultMessage),
|
||||
map(({ data }) => data)
|
||||
|
@ -29,8 +29,10 @@ import {
|
||||
defer,
|
||||
distinctUntilChanged,
|
||||
distinctUntilKeyChanged,
|
||||
endWith,
|
||||
filter,
|
||||
finalize,
|
||||
ignoreElements,
|
||||
map,
|
||||
merge,
|
||||
of,
|
||||
@ -40,7 +42,6 @@ import {
|
||||
skip,
|
||||
startWith,
|
||||
switchMap,
|
||||
takeLast,
|
||||
takeUntil,
|
||||
tap,
|
||||
withLatestFrom
|
||||
@ -273,7 +274,7 @@ export function mountTableOfContents(
|
||||
): Observable<Component<TableOfContents>> {
|
||||
return defer(() => {
|
||||
const push$ = new Subject<TableOfContents>()
|
||||
const done$ = push$.pipe(takeLast(1))
|
||||
const done$ = push$.pipe(ignoreElements(), endWith(true))
|
||||
push$.subscribe(({ prev, next }) => {
|
||||
|
||||
/* Look forward */
|
||||
|
@ -29,10 +29,10 @@ import {
|
||||
distinctUntilKeyChanged,
|
||||
endWith,
|
||||
finalize,
|
||||
ignoreElements,
|
||||
map,
|
||||
repeat,
|
||||
skip,
|
||||
takeLast,
|
||||
takeUntil,
|
||||
tap
|
||||
} from "rxjs"
|
||||
@ -134,7 +134,7 @@ export function mountBackToTop(
|
||||
el: HTMLElement, { viewport$, header$, main$, target$ }: MountOptions
|
||||
): Observable<Component<BackToTop>> {
|
||||
const push$ = new Subject<BackToTop>()
|
||||
const done$ = push$.pipe(takeLast(1))
|
||||
const done$ = push$.pipe(ignoreElements(), endWith(true))
|
||||
push$.subscribe({
|
||||
|
||||
/* Handle emission */
|
||||
|
@ -1,6 +0,0 @@
|
||||
{
|
||||
"rules": {
|
||||
"@typescript-eslint/no-explicit-any": "off",
|
||||
"no-console": "off"
|
||||
}
|
||||
}
|
@ -22,18 +22,21 @@
|
||||
|
||||
import {
|
||||
SearchDocument,
|
||||
SearchDocumentMap,
|
||||
SearchIndex,
|
||||
SearchOptions,
|
||||
setupSearchDocumentMap
|
||||
} from "../document"
|
||||
} from "../config"
|
||||
import {
|
||||
SearchHighlightFactoryFn,
|
||||
setupSearchHighlighter
|
||||
} from "../highlighter"
|
||||
import { SearchOptions } from "../options"
|
||||
Position,
|
||||
PositionTable,
|
||||
highlighter,
|
||||
tokenize
|
||||
} from "../internal"
|
||||
import {
|
||||
SearchQueryTerms,
|
||||
getSearchQueryTerms,
|
||||
parseSearchQuery
|
||||
parseSearchQuery,
|
||||
transformSearchQuery
|
||||
} from "../query"
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
@ -41,74 +44,48 @@ import {
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Search index configuration
|
||||
* Search item
|
||||
*/
|
||||
export interface SearchIndexConfig {
|
||||
lang: string[] /* Search languages */
|
||||
separator: string /* Search separator */
|
||||
}
|
||||
|
||||
/**
|
||||
* Search index document
|
||||
*/
|
||||
export interface SearchIndexDocument {
|
||||
location: string /* Document location */
|
||||
title: string /* Document title */
|
||||
text: string /* Document text */
|
||||
tags?: string[] /* Document tags */
|
||||
boost?: number /* Document boost */
|
||||
}
|
||||
|
||||
/* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Search index
|
||||
*
|
||||
* This interfaces describes the format of the `search_index.json` file which
|
||||
* is automatically built by the MkDocs search plugin.
|
||||
*/
|
||||
export interface SearchIndex {
|
||||
config: SearchIndexConfig /* Search index configuration */
|
||||
docs: SearchIndexDocument[] /* Search index documents */
|
||||
options: SearchOptions /* Search options */
|
||||
}
|
||||
|
||||
/* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Search metadata
|
||||
*/
|
||||
export interface SearchMetadata {
|
||||
export interface SearchItem extends SearchDocument {
|
||||
score: number /* Score (relevance) */
|
||||
terms: SearchQueryTerms /* Search query terms */
|
||||
}
|
||||
|
||||
/* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Search result document
|
||||
*/
|
||||
export type SearchResultDocument = SearchDocument & SearchMetadata
|
||||
|
||||
/**
|
||||
* Search result item
|
||||
*/
|
||||
export type SearchResultItem = SearchResultDocument[]
|
||||
|
||||
/* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Search result
|
||||
*/
|
||||
export interface SearchResult {
|
||||
items: SearchResultItem[] /* Search result items */
|
||||
suggestions?: string[] /* Search suggestions */
|
||||
items: SearchItem[][] /* Search items */
|
||||
suggest?: string[] /* Search suggestions */
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Functions
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Create field extractor factory
|
||||
*
|
||||
* @param table - Position table map
|
||||
*
|
||||
* @returns Extractor factory
|
||||
*/
|
||||
function extractor(table: Map<string, PositionTable>) {
|
||||
return (name: keyof SearchDocument) => {
|
||||
return (doc: SearchDocument) => {
|
||||
if (typeof doc[name] === "undefined")
|
||||
return undefined
|
||||
|
||||
/* Compute identifier and initialize table */
|
||||
const id = [doc.location, name].join(":")
|
||||
table.set(id, lunr.tokenizer.table = [])
|
||||
|
||||
/* Return field value */
|
||||
return doc[name]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute the difference of two lists of strings
|
||||
*
|
||||
@ -134,85 +111,78 @@ function difference(a: string[], b: string[]): string[] {
|
||||
export class Search {
|
||||
|
||||
/**
|
||||
* Search document mapping
|
||||
*
|
||||
* A mapping of URLs (including hash fragments) to the actual articles and
|
||||
* sections of the documentation. The search document mapping must be created
|
||||
* regardless of whether the index was prebuilt or not, as Lunr.js itself
|
||||
* only stores the actual index.
|
||||
* Search document map
|
||||
*/
|
||||
protected documents: SearchDocumentMap
|
||||
|
||||
/**
|
||||
* Search highlight factory function
|
||||
*/
|
||||
protected highlight: SearchHighlightFactoryFn
|
||||
|
||||
/**
|
||||
* The underlying Lunr.js search index
|
||||
*/
|
||||
protected index: lunr.Index
|
||||
protected map: Map<string, SearchDocument>
|
||||
|
||||
/**
|
||||
* Search options
|
||||
*/
|
||||
protected options: SearchOptions
|
||||
|
||||
/**
|
||||
* The underlying Lunr.js search index
|
||||
*/
|
||||
protected index: lunr.Index
|
||||
|
||||
/**
|
||||
* Internal position table map
|
||||
*/
|
||||
protected table: Map<string, PositionTable>
|
||||
|
||||
/**
|
||||
* Create the search integration
|
||||
*
|
||||
* @param data - Search index
|
||||
*/
|
||||
public constructor({ config, docs, options }: SearchIndex) {
|
||||
const field = extractor(this.table = new Map())
|
||||
|
||||
/* Set up document map and options */
|
||||
this.map = setupSearchDocumentMap(docs)
|
||||
this.options = options
|
||||
|
||||
/* Set up document map and highlighter factory */
|
||||
this.documents = setupSearchDocumentMap(docs)
|
||||
this.highlight = setupSearchHighlighter(config, false)
|
||||
|
||||
/* Set separator for tokenizer */
|
||||
lunr.tokenizer.separator = new RegExp(config.separator)
|
||||
|
||||
/* Create search index */
|
||||
/* Set up document index */
|
||||
this.index = lunr(function () {
|
||||
this.metadataWhitelist = ["position"]
|
||||
this.b(0)
|
||||
|
||||
/* Set up multi-language support */
|
||||
/* Set up (multi-)language support */
|
||||
if (config.lang.length === 1 && config.lang[0] !== "en") {
|
||||
this.use((lunr as any)[config.lang[0]])
|
||||
// @ts-expect-error - namespace indexing not supported
|
||||
this.use(lunr[config.lang[0]])
|
||||
} else if (config.lang.length > 1) {
|
||||
this.use((lunr as any).multiLanguage(...config.lang))
|
||||
this.use(lunr.multiLanguage(...config.lang))
|
||||
}
|
||||
|
||||
/* Set up custom tokenizer (must be after language setup) */
|
||||
this.tokenizer = tokenize as typeof lunr.tokenizer
|
||||
lunr.tokenizer.separator = new RegExp(config.separator)
|
||||
|
||||
/* Compute functions to be removed from the pipeline */
|
||||
const fns = difference([
|
||||
"trimmer", "stopWordFilter", "stemmer"
|
||||
], options.pipeline)
|
||||
], config.pipeline)
|
||||
|
||||
/* Remove functions from the pipeline for registered languages */
|
||||
for (const lang of config.lang.map(language => (
|
||||
language === "en" ? lunr : (lunr as any)[language]
|
||||
))) {
|
||||
// @ts-expect-error - namespace indexing not supported
|
||||
language === "en" ? lunr : lunr[language]
|
||||
)))
|
||||
for (const fn of fns) {
|
||||
this.pipeline.remove(lang[fn])
|
||||
this.searchPipeline.remove(lang[fn])
|
||||
}
|
||||
}
|
||||
|
||||
/* Set up reference */
|
||||
/* Set up index reference */
|
||||
this.ref("location")
|
||||
|
||||
/* Set up fields */
|
||||
this.field("title", { boost: 1e3 })
|
||||
this.field("text")
|
||||
this.field("tags", { boost: 1e6, extractor: doc => {
|
||||
const { tags = [] } = doc as SearchDocument
|
||||
return tags.reduce((list, tag) => [
|
||||
...list,
|
||||
...lunr.tokenizer(tag)
|
||||
], [] as lunr.Token[])
|
||||
} })
|
||||
/* Set up index fields */
|
||||
this.field("title", { boost: 1e3, extractor: field("title") })
|
||||
this.field("text", { boost: 1e0, extractor: field("text") })
|
||||
this.field("tags", { boost: 1e6, extractor: field("tags") })
|
||||
|
||||
/* Index documents */
|
||||
/* Add documents to index */
|
||||
for (const doc of docs)
|
||||
this.add(doc, { boost: doc.boost })
|
||||
})
|
||||
@ -221,105 +191,129 @@ export class Search {
|
||||
/**
|
||||
* Search for matching documents
|
||||
*
|
||||
* The search index which MkDocs provides is divided up into articles, which
|
||||
* contain the whole content of the individual pages, and sections, which only
|
||||
* contain the contents of the subsections obtained by breaking the individual
|
||||
* pages up at `h1` ... `h6`. As there may be many sections on different pages
|
||||
* with identical titles (for example within this very project, e.g. "Usage"
|
||||
* or "Installation"), they need to be put into the context of the containing
|
||||
* page. For this reason, section results are grouped within their respective
|
||||
* articles which are the top-level results that are returned.
|
||||
* @param query - Search query
|
||||
*
|
||||
* @param query - Query value
|
||||
*
|
||||
* @returns Search results
|
||||
* @returns Search result
|
||||
*/
|
||||
public search(query: string): SearchResult {
|
||||
if (query) {
|
||||
try {
|
||||
const highlight = this.highlight(query)
|
||||
query = transformSearchQuery(query)
|
||||
if (!query)
|
||||
return { items: [] }
|
||||
|
||||
/* Parse query to extract clauses for analysis */
|
||||
const clauses = parseSearchQuery(query)
|
||||
.filter(clause => (
|
||||
clause.presence !== lunr.Query.presence.PROHIBITED
|
||||
))
|
||||
/* Parse query to extract clauses for analysis */
|
||||
const clauses = parseSearchQuery(query)
|
||||
.filter(clause => (
|
||||
clause.presence !== lunr.Query.presence.PROHIBITED
|
||||
))
|
||||
|
||||
/* Perform search and post-process results */
|
||||
const groups = this.index.search(`${query}*`)
|
||||
/* Perform search and post-process results */
|
||||
const groups = this.index.search(query)
|
||||
|
||||
/* Apply post-query boosts based on title and search query terms */
|
||||
.reduce<SearchResultItem>((item, { ref, score, matchData }) => {
|
||||
const document = this.documents.get(ref)
|
||||
if (typeof document !== "undefined") {
|
||||
const { location, title, text, tags, parent } = document
|
||||
/* Apply post-query boosts based on title and search query terms */
|
||||
.reduce<SearchItem[]>((item, { ref, score, matchData }) => {
|
||||
let doc = this.map.get(ref)
|
||||
if (typeof doc !== "undefined") {
|
||||
doc = { ...doc }
|
||||
if (doc.tags)
|
||||
doc.tags = [...doc.tags]
|
||||
|
||||
/* Compute and analyze search query terms */
|
||||
const terms = getSearchQueryTerms(
|
||||
clauses,
|
||||
Object.keys(matchData.metadata)
|
||||
/* Compute and analyze search query terms */
|
||||
const terms = getSearchQueryTerms(
|
||||
clauses,
|
||||
Object.keys(matchData.metadata)
|
||||
)
|
||||
|
||||
// we must collect all positions for each term!
|
||||
// we now take the keys of the index
|
||||
for (const field of this.index.fields) {
|
||||
if (!(field in doc))
|
||||
continue
|
||||
|
||||
/* Collect matches */
|
||||
const positions: Position[] = []
|
||||
for (const match of Object.values(matchData.metadata))
|
||||
if (field in match)
|
||||
positions.push(...match[field].position)
|
||||
|
||||
// @ts-expect-error - @todo fix typings
|
||||
if (Array.isArray(doc[field])) {
|
||||
// @ts-expect-error - @todo fix typings
|
||||
for (let i = 0; i < doc[field].length; i++) {
|
||||
// @ts-expect-error - @todo fix typings
|
||||
doc[field][i] = highlighter(doc[field][i],
|
||||
this.table.get([doc.location, field].join(":"))!,
|
||||
positions
|
||||
)
|
||||
}
|
||||
} else {
|
||||
// @ts-expect-error - @todo fix typings
|
||||
doc[field] = highlighter(doc[field],
|
||||
this.table.get([doc.location, field].join(":"))!,
|
||||
positions
|
||||
)
|
||||
|
||||
/* Highlight title and text and apply post-query boosts */
|
||||
const boost = +!parent + +Object.values(terms).every(t => t)
|
||||
item.push({
|
||||
location,
|
||||
title: highlight(title),
|
||||
text: highlight(text),
|
||||
...tags && { tags: tags.map(highlight) },
|
||||
score: score * (1 + boost),
|
||||
terms
|
||||
})
|
||||
}
|
||||
return item
|
||||
}, [])
|
||||
}
|
||||
|
||||
/* Sort search results again after applying boosts */
|
||||
.sort((a, b) => b.score - a.score)
|
||||
/* Highlight title and text and apply post-query boosts */
|
||||
const boost = +!doc.parent +
|
||||
Object.values(terms)
|
||||
.filter(t => t).length /
|
||||
Object.keys(terms).length
|
||||
|
||||
/* Group search results by page */
|
||||
.reduce((items, result) => {
|
||||
const document = this.documents.get(result.location)
|
||||
if (typeof document !== "undefined") {
|
||||
const ref = "parent" in document
|
||||
? document.parent!.location
|
||||
: document.location
|
||||
items.set(ref, [...items.get(ref) || [], result])
|
||||
}
|
||||
return items
|
||||
}, new Map<string, SearchResultItem>())
|
||||
|
||||
/* Generate search suggestions, if desired */
|
||||
let suggestions: string[] | undefined
|
||||
if (this.options.suggestions) {
|
||||
const titles = this.index.query(builder => {
|
||||
for (const clause of clauses)
|
||||
builder.term(clause.term, {
|
||||
fields: ["title"],
|
||||
presence: lunr.Query.presence.REQUIRED,
|
||||
wildcard: lunr.Query.wildcard.TRAILING
|
||||
})
|
||||
/* Append item */
|
||||
item.push({
|
||||
...doc,
|
||||
score: score * (1 + boost ** 2),
|
||||
terms
|
||||
})
|
||||
|
||||
/* Retrieve suggestions for best match */
|
||||
suggestions = titles.length
|
||||
? Object.keys(titles[0].matchData.metadata)
|
||||
: []
|
||||
}
|
||||
return item
|
||||
}, [])
|
||||
|
||||
/* Return items and suggestions */
|
||||
return {
|
||||
items: [...groups.values()],
|
||||
...typeof suggestions !== "undefined" && { suggestions }
|
||||
/* Sort search results again after applying boosts */
|
||||
.sort((a, b) => b.score - a.score)
|
||||
|
||||
/* Group search results by article */
|
||||
.reduce((items, result) => {
|
||||
const doc = this.map.get(result.location)
|
||||
if (typeof doc !== "undefined") {
|
||||
const ref = doc.parent
|
||||
? doc.parent.location
|
||||
: doc.location
|
||||
items.set(ref, [...items.get(ref) || [], result])
|
||||
}
|
||||
return items
|
||||
}, new Map<string, SearchItem[]>())
|
||||
|
||||
/* Log errors to console (for now) */
|
||||
} catch {
|
||||
console.warn(`Invalid query: ${query} – see https://bit.ly/2s3ChXG`)
|
||||
/* Ensure that every item set has an article */
|
||||
for (const [ref, items] of groups)
|
||||
if (!items.find(item => item.location === ref)) {
|
||||
const doc = this.map.get(ref)!
|
||||
items.push({ ...doc, score: 0, terms: {} })
|
||||
}
|
||||
|
||||
/* Generate search suggestions, if desired */
|
||||
let suggest: string[] | undefined
|
||||
if (this.options.suggest) {
|
||||
const titles = this.index.query(builder => {
|
||||
for (const clause of clauses)
|
||||
builder.term(clause.term, {
|
||||
fields: ["title"],
|
||||
presence: lunr.Query.presence.REQUIRED,
|
||||
wildcard: lunr.Query.wildcard.TRAILING
|
||||
})
|
||||
})
|
||||
|
||||
/* Retrieve suggestions for best match */
|
||||
suggest = titles.length
|
||||
? Object.keys(titles[0].matchData.metadata)
|
||||
: []
|
||||
}
|
||||
|
||||
/* Return nothing in case of error or empty query */
|
||||
return { items: [] }
|
||||
/* Return search result */
|
||||
return {
|
||||
items: [...groups.values()],
|
||||
...typeof suggest !== "undefined" && { suggest }
|
||||
}
|
||||
}
|
||||
}
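
A minimal usage sketch for the class above; the query string and logged fields are illustrative, and `data` stands for a parsed `search_index.json` payload with options attached:

const search = new Search(data)
const { items, suggest } = search.search("hello world")
for (const group of items)                   /* one group per article, sections included */
  for (const item of group)
    console.log(item.location, item.score)
console.log(suggest)                         /* only present when suggestions are enabled */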
|
||||
|
115
src/assets/javascripts/integrations/search/config/index.ts
Normal file
@ -0,0 +1,115 @@
|
||||
/*
|
||||
* Copyright (c) 2016-2022 Martin Donath <martin.donath@squidfunk.com>
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to
|
||||
* deal in the Software without restriction, including without limitation the
|
||||
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
* sell copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
* IN THE SOFTWARE.
|
||||
*/
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Types
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Search configuration
|
||||
*/
|
||||
export interface SearchConfig {
|
||||
lang: string[] /* Search languages */
|
||||
separator: string /* Search separator */
|
||||
pipeline: SearchPipelineFn[] /* Search pipeline */
|
||||
}
|
||||
|
||||
/**
|
||||
* Search document
|
||||
*/
|
||||
export interface SearchDocument {
|
||||
location: string /* Document location */
|
||||
title: string /* Document title */
|
||||
text: string /* Document text */
|
||||
tags?: string[] /* Document tags */
|
||||
boost?: number /* Document boost */
|
||||
parent?: SearchDocument /* Document parent */
|
||||
}
|
||||
|
||||
/**
|
||||
* Search options
|
||||
*/
|
||||
export interface SearchOptions {
|
||||
suggest: boolean /* Search suggestions */
|
||||
}
|
||||
|
||||
/* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Search index
|
||||
*/
|
||||
export interface SearchIndex {
|
||||
config: SearchConfig /* Search configuration */
|
||||
docs: SearchDocument[] /* Search documents */
|
||||
options: SearchOptions /* Search options */
|
||||
}
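
For illustration, a hand-written value of this shape could look as follows; all strings are placeholders rather than output of the MkDocs search plugin:

const index: SearchIndex = {
  config: {
    lang: ["en"],
    separator: "[\\s\\-]+",
    pipeline: ["trimmer", "stopWordFilter", "stemmer"]
  },
  docs: [
    { location: "page/",         title: "Page",    text: "Article text" },
    { location: "page/#section", title: "Section", text: "Section text" }
  ],
  options: {
    suggest: true
  }
}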
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Helper types
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Search pipeline function
|
||||
*/
|
||||
type SearchPipelineFn =
|
||||
| "trimmer" /* Trimmer */
|
||||
| "stopWordFilter" /* Stop word filter */
|
||||
| "stemmer" /* Stemmer */
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Functions
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Create a search document map
|
||||
*
|
||||
* This function creates a mapping of URLs (including anchors) to the actual
|
||||
* articles and sections. It relies on the invariant that the search index is
|
||||
* ordered with the main article appearing before all sections with anchors.
|
||||
 * If this is not the case, the logic must be changed.
|
||||
*
|
||||
* @param docs - Search documents
|
||||
*
|
||||
* @returns Search document map
|
||||
*/
|
||||
export function setupSearchDocumentMap(
|
||||
docs: SearchDocument[]
|
||||
): Map<string, SearchDocument> {
|
||||
const map = new Map<string, SearchDocument>()
|
||||
for (const doc of docs) {
|
||||
const [path] = doc.location.split("#")
|
||||
|
||||
/* Add document article */
|
||||
const article = map.get(path)
|
||||
if (typeof article === "undefined") {
|
||||
map.set(path, doc)
|
||||
|
||||
/* Add document section */
|
||||
} else {
|
||||
map.set(doc.location, doc)
|
||||
doc.parent = article
|
||||
}
|
||||
}
|
||||
|
||||
/* Return search document map */
|
||||
return map
|
||||
}
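
A short sketch of the resulting map, assuming the ordering invariant holds (the article precedes its sections):

const map = setupSearchDocumentMap([
  { location: "page/",         title: "Page",    text: "..." },
  { location: "page/#section", title: "Section", text: "..." }
])
map.get("page/")           /* the article itself */
map.get("page/#section")   /* the section, with `parent` set to the article */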
|
@ -1,107 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2016-2022 Martin Donath <martin.donath@squidfunk.com>
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to
|
||||
* deal in the Software without restriction, including without limitation the
|
||||
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
* sell copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
* IN THE SOFTWARE.
|
||||
*/
|
||||
|
||||
import escapeHTML from "escape-html"
|
||||
|
||||
import { SearchIndexDocument } from "../_"
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Types
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Search document
|
||||
*/
|
||||
export interface SearchDocument extends SearchIndexDocument {
|
||||
parent?: SearchIndexDocument /* Parent article */
|
||||
}
|
||||
|
||||
/* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Search document mapping
|
||||
*/
|
||||
export type SearchDocumentMap = Map<string, SearchDocument>
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Functions
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Create a search document mapping
|
||||
*
|
||||
* @param docs - Search index documents
|
||||
*
|
||||
* @returns Search document map
|
||||
*/
|
||||
export function setupSearchDocumentMap(
|
||||
docs: SearchIndexDocument[]
|
||||
): SearchDocumentMap {
|
||||
const documents = new Map<string, SearchDocument>()
|
||||
const parents = new Set<SearchDocument>()
|
||||
for (const doc of docs) {
|
||||
const [path, hash] = doc.location.split("#")
|
||||
|
||||
/* Extract location, title and tags */
|
||||
const location = doc.location
|
||||
const title = doc.title
|
||||
const tags = doc.tags
|
||||
|
||||
/* Escape and cleanup text */
|
||||
const text = escapeHTML(doc.text)
|
||||
.replace(/\s+(?=[,.:;!?])/g, "")
|
||||
.replace(/\s+/g, " ")
|
||||
|
||||
/* Handle section */
|
||||
if (hash) {
|
||||
const parent = documents.get(path)!
|
||||
|
||||
/* Ignore first section, override article */
|
||||
if (!parents.has(parent)) {
|
||||
parent.title = doc.title
|
||||
parent.text = text
|
||||
|
||||
/* Remember that we processed the article */
|
||||
parents.add(parent)
|
||||
|
||||
/* Add subsequent section */
|
||||
} else {
|
||||
documents.set(location, {
|
||||
location,
|
||||
title,
|
||||
text,
|
||||
parent
|
||||
})
|
||||
}
|
||||
|
||||
/* Add article */
|
||||
} else {
|
||||
documents.set(location, {
|
||||
location,
|
||||
title,
|
||||
text,
|
||||
...tags && { tags }
|
||||
})
|
||||
}
|
||||
}
|
||||
return documents
|
||||
}
|
@ -22,7 +22,7 @@
|
||||
|
||||
import escapeHTML from "escape-html"
|
||||
|
||||
import { SearchIndexConfig } from "../_"
|
||||
import { SearchConfig } from "../config"
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Types
|
||||
@ -53,15 +53,21 @@ export type SearchHighlightFactoryFn = (query: string) => SearchHighlightFn
|
||||
/**
|
||||
* Create a search highlighter
|
||||
*
|
||||
* @param config - Search index configuration
|
||||
* @param escape - Whether to escape HTML
|
||||
* @param config - Search configuration
|
||||
*
|
||||
* @returns Search highlight factory function
|
||||
*/
|
||||
export function setupSearchHighlighter(
|
||||
config: SearchIndexConfig, escape: boolean
|
||||
config: SearchConfig
|
||||
): SearchHighlightFactoryFn {
|
||||
const separator = new RegExp(config.separator, "img")
|
||||
// Hack: temporarily remove pure lookaheads
|
||||
const regex = config.separator.split("|").map(term => {
|
||||
const temp = term.replace(/(\(\?[!=][^)]+\))/g, "")
|
||||
return temp.length === 0 ? "\uFFFD" : term
|
||||
})
|
||||
.join("|")
|
||||
|
||||
const separator = new RegExp(regex, "img")
|
||||
const highlight = (_: unknown, data: string, term: string) => {
|
||||
return `${data}<mark data-md-highlight>${term}</mark>`
|
||||
}
|
||||
@ -73,19 +79,15 @@ export function setupSearchHighlighter(
|
||||
.trim()
|
||||
|
||||
/* Create search term match expression */
|
||||
const match = new RegExp(`(^|${config.separator})(${
|
||||
const match = new RegExp(`(^|${config.separator}|)(${
|
||||
query
|
||||
.replace(/[|\\{}()[\]^$+*?.-]/g, "\\$&")
|
||||
.replace(separator, "|")
|
||||
})`, "img")
|
||||
|
||||
/* Highlight string value */
|
||||
return value => (
|
||||
escape
|
||||
? escapeHTML(value)
|
||||
: value
|
||||
)
|
||||
.replace(match, highlight)
|
||||
.replace(/<\/mark>(\s+)<mark[^>]*>/img, "$1")
|
||||
return value => escapeHTML(value)
|
||||
.replace(match, highlight)
|
||||
.replace(/<\/mark>(\s+)<mark[^>]*>/img, "$1")
|
||||
}
|
||||
}
|
||||
|
@ -21,8 +21,7 @@
|
||||
*/
|
||||
|
||||
export * from "./_"
|
||||
export * from "./document"
|
||||
export * from "./config"
|
||||
export * from "./highlighter"
|
||||
export * from "./options"
|
||||
export * from "./query"
|
||||
export * from "./worker"
|
||||
|
@ -0,0 +1,6 @@
|
||||
{
|
||||
"rules": {
|
||||
"no-fallthrough": "off",
|
||||
"no-underscore-dangle": "off"
|
||||
}
|
||||
}
|
@ -0,0 +1,77 @@
|
||||
/*
|
||||
* Copyright (c) 2016-2022 Martin Donath <martin.donath@squidfunk.com>
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to
|
||||
* deal in the Software without restriction, including without limitation the
|
||||
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
* sell copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
* IN THE SOFTWARE.
|
||||
*/
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Helper types
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Visitor function
|
||||
*
|
||||
* @param start - Start offset
|
||||
* @param end - End offset
|
||||
*/
|
||||
type VisitorFn = (
|
||||
start: number, end: number
|
||||
) => void
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Functions
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Split a string using the given separator
|
||||
*
|
||||
* This function intentionally takes a visitor function contrary to collecting
|
||||
* and returning all ranges, as it's significantly more memory efficient.
|
||||
*
|
||||
* @param value - String value
|
||||
* @param separator - Separator
|
||||
* @param fn - Visitor function
|
||||
*/
|
||||
export function split(
|
||||
value: string, separator: RegExp, fn: VisitorFn
|
||||
): void {
|
||||
separator = new RegExp(separator, "g")
|
||||
|
||||
/* Split string using separator */
|
||||
let match: RegExpExecArray | null
|
||||
let index = 0
|
||||
do {
|
||||
match = separator.exec(value)
|
||||
|
||||
/* Emit non-empty range */
|
||||
const until = match?.index ?? value.length
|
||||
if (index < until)
|
||||
fn(index, until)
|
||||
|
||||
/* Update last index */
|
||||
if (match) {
|
||||
const [term] = match
|
||||
index = match.index + term.length
|
||||
|
||||
/* Support zero-length lookaheads */
|
||||
if (term.length === 0)
|
||||
separator.lastIndex = match.index + 1
|
||||
}
|
||||
} while (match)
|
||||
}
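
A usage sketch with an illustrative separator and input:

/* Collect the non-empty ranges between whitespace and hyphens */
const ranges: [number, number][] = []
split("foo-bar baz", /[\s\-]+/, (start, end) => {
  ranges.push([start, end])
})
/* ranges === [[0, 3], [4, 7], [8, 11]], i.e. "foo", "bar", "baz" */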
|
@ -0,0 +1,89 @@
|
||||
/*
|
||||
* Copyright (c) 2016-2022 Martin Donath <martin.donath@squidfunk.com>
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to
|
||||
* deal in the Software without restriction, including without limitation the
|
||||
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
* sell copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
* IN THE SOFTWARE.
|
||||
*/
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Helper types
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Visitor function
|
||||
*
|
||||
* @param block - Block index
|
||||
* @param operation - Operation index
|
||||
* @param start - Start offset
|
||||
* @param end - End offset
|
||||
*/
|
||||
type VisitorFn = (
|
||||
block: number, operation: number, start: number, end: number
|
||||
) => void
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Functions
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Extract all non-HTML parts of a string
|
||||
*
|
||||
* This function preprocesses the given string by isolating all non-HTML parts
|
||||
* of a string, in order to ensure that HTML tags are removed before indexing.
|
||||
* This function intentionally takes a visitor function contrary to collecting
|
||||
* and returning all sections, as it's significantly more memory efficient.
|
||||
*
|
||||
* @param value - String value
|
||||
* @param fn - Visitor function
|
||||
*/
|
||||
export function extract(
|
||||
value: string, fn: VisitorFn
|
||||
): void {
|
||||
|
||||
let block = 0 /* Current block */
|
||||
let start = 0 /* Current start offset */
|
||||
let end = 0 /* Current end offset */
|
||||
|
||||
/* Split string into sections */
|
||||
for (let stack = 0; end < value.length; end++) {
|
||||
|
||||
/* Tag start after non-empty section */
|
||||
if (value.charAt(end) === "<" && end > start) {
|
||||
fn(block, 1, start, start = end)
|
||||
|
||||
/* Tag end */
|
||||
} else if (value.charAt(end) === ">") {
|
||||
if (value.charAt(start + 1) === "/") {
|
||||
if (--stack === 0)
|
||||
fn(block++, 2, start, end + 1)
|
||||
|
||||
/* Tag is not self-closing */
|
||||
} else if (value.charAt(end - 1) !== "/") {
|
||||
if (stack++ === 0)
|
||||
fn(block, 0, start, end + 1)
|
||||
}
|
||||
|
||||
/* New section */
|
||||
start = end + 1
|
||||
}
|
||||
}
|
||||
|
||||
/* Add trailing section */
|
||||
if (end > start)
|
||||
fn(block, 1, start, end)
|
||||
}
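
A usage sketch on an illustrative HTML fragment; the operation index distinguishes opening tags (0), text content (1) and closing tags (2), and the block counter advances after each closing top-level tag:

const html = "<h1>Hello World</h1>"
extract(html, (block, operation, start, end) => {
  console.log(block, operation, html.slice(start, end))
})
/* 0 0 "<h1>"        - opening tag                */
/* 0 1 "Hello World" - text content to be indexed */
/* 0 2 "</h1>"       - closing tag                */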
|
@ -0,0 +1,90 @@
|
||||
/*
|
||||
* Copyright (c) 2016-2022 Martin Donath <martin.donath@squidfunk.com>
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to
|
||||
* deal in the Software without restriction, including without limitation the
|
||||
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
* sell copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
* IN THE SOFTWARE.
|
||||
*/
|
||||
|
||||
import { Position, PositionTable } from "../tokenizer"
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Functions
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Highlight all occurrences in a string
|
||||
*
|
||||
* @param value - String value
|
||||
* @param table - Table for indexing
|
||||
* @param positions - Occurrences
|
||||
*
|
||||
* @returns Highlighted string value
|
||||
*/
|
||||
export function highlighter(
|
||||
value: string, table: PositionTable, positions: Position[]
|
||||
): string {
|
||||
const slices: string[] = []
|
||||
|
||||
/* Map matches to blocks */
|
||||
const blocks = new Map<number, number[]>()
|
||||
for (const i of positions.sort((a, b) => a - b)) {
|
||||
const block = i >>> 20
|
||||
const index = i & 0xFFFFF
|
||||
|
||||
/* Ensure presence of block group */
|
||||
let group = blocks.get(block)
|
||||
if (typeof group === "undefined")
|
||||
blocks.set(block, group = [])
|
||||
|
||||
/* Add index to group */
|
||||
group.push(index)
|
||||
}
|
||||
|
||||
/* Compute slices */
|
||||
for (const [block, indexes] of blocks) {
|
||||
const t = table[block]
|
||||
|
||||
/* Extract start and end positions, and length */
|
||||
const start = t[0] >>> 12
|
||||
const end = t[t.length - 1] >>> 12
|
||||
const length = t[t.length - 1] >>> 2 & 0x3FF
|
||||
|
||||
/* Extract and highlight slice/block */
|
||||
let slice = value.slice(start, end + length)
|
||||
for (const i of indexes.sort((a, b) => b - a)) {
|
||||
|
||||
/* Retrieve offset and length of match */
|
||||
const p = (t[i] >>> 12) - start
|
||||
const q = (t[i] >>> 2 & 0x3FF) + p
|
||||
|
||||
/* Wrap occurrence */
|
||||
slice = [
|
||||
slice.slice(0, p),
|
||||
"<mark>", slice.slice(p, q), "</mark>",
|
||||
slice.slice(q)
|
||||
].join("")
|
||||
}
|
||||
|
||||
/* Append slice and abort if we have two */
|
||||
if (slices.push(slice) === 2)
|
||||
break
|
||||
}
|
||||
|
||||
/* Return highlighted string value */
|
||||
return slices.join("")
|
||||
}
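
A worked sketch with a hand-built table; each entry packs `offset << 12 | length << 2 | type` and each position packs `block << 20 | index`, mirroring what the tokenizer further below writes:

/* Block 0 holds two text entries: "hello" at offset 0 and "world" at offset 6 */
const table: PositionTable = [[0 << 12 | 5 << 2 | 1, 6 << 12 | 5 << 2 | 1]]

/* One occurrence: block 0, entry 0 ("hello") */
highlighter("hello world", table, [0 << 20 | 0])
/* -> "<mark>hello</mark> world" */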
|
@ -20,29 +20,7 @@
|
||||
* IN THE SOFTWARE.
|
||||
*/
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Types
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Search pipeline function
|
||||
*/
|
||||
export type SearchPipelineFn =
|
||||
| "trimmer" /* Trimmer */
|
||||
| "stopWordFilter" /* Stop word filter */
|
||||
| "stemmer" /* Stemmer */
|
||||
|
||||
/**
|
||||
* Search pipeline
|
||||
*/
|
||||
export type SearchPipeline = SearchPipelineFn[]
|
||||
|
||||
/* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Search options
|
||||
*/
|
||||
export interface SearchOptions {
|
||||
pipeline: SearchPipeline /* Search pipeline */
|
||||
suggestions: boolean /* Search suggestions */
|
||||
}
|
||||
export * from "./_"
|
||||
export * from "./extractor"
|
||||
export * from "./highlighter"
|
||||
export * from "./tokenizer"
|
@ -0,0 +1,148 @@
|
||||
/*
|
||||
* Copyright (c) 2016-2022 Martin Donath <martin.donath@squidfunk.com>
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to
|
||||
* deal in the Software without restriction, including without limitation the
|
||||
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
* sell copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
* IN THE SOFTWARE.
|
||||
*/
|
||||
|
||||
import { split } from "../_"
|
||||
import { extract } from "../extractor"
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Types
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Table for indexing
|
||||
*/
|
||||
export type PositionTable = number[][]
|
||||
|
||||
/**
|
||||
* Position
|
||||
*/
|
||||
export type Position = number
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Functions
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Split a string into tokens
|
||||
*
|
||||
* This tokenizer supersedes the default tokenizer that is provided by Lunr.js,
|
||||
* as it is aware of HTML tags and allows for multi-character splitting.
|
||||
*
|
||||
* @param input - String value or token
|
||||
*
|
||||
* @returns Tokens
|
||||
*/
|
||||
export function tokenize(
|
||||
input?: string | string[]
|
||||
): lunr.Token[] {
|
||||
const tokens: lunr.Token[] = []
|
||||
|
||||
/**
|
||||
* Initialize segmenter, if loaded
|
||||
*
|
||||
* Note that doing this here is not ideal, but it's okay as we just test it
|
||||
   * before bringing the new search implementation into its final shape.
|
||||
*/
|
||||
const segmenter = "TinySegmenter" in lunr
|
||||
? new lunr.TinySegmenter()
|
||||
: undefined
|
||||
|
||||
/* Tokenize an array of string values */
|
||||
if (Array.isArray(input)) {
|
||||
// @todo: handle multi-valued fields (e.g. tags)
|
||||
for (const value of input)
|
||||
tokens.push(...tokenize(value))
|
||||
|
||||
/* Tokenize a string value */
|
||||
} else if (input) {
|
||||
const table = lunr.tokenizer.table
|
||||
|
||||
/* Split string into sections and tokenize content blocks */
|
||||
extract(input, (block, type, start, end) => {
|
||||
if (type & 1) {
|
||||
const section = input.slice(start, end)
|
||||
split(section, lunr.tokenizer.separator, (index, until) => {
|
||||
|
||||
/**
|
||||
* Apply segmenter after tokenization. Note that the segmenter will
|
||||
* also split words at word boundaries, which is not what we want, so
|
||||
* we need to check if we can somehow mitigate this behavior.
|
||||
*/
|
||||
if (typeof segmenter !== "undefined") {
|
||||
const subsection = section.slice(index, until)
|
||||
if (/^[MHIK]$/.test(segmenter.ctype_(subsection))) {
|
||||
const segments = segmenter.segment(subsection)
|
||||
for (let i = 0, l = 0; i < segments.length; i++) {
|
||||
|
||||
/* Add block to table */
|
||||
table[block] ||= []
|
||||
table[block].push(
|
||||
start + index + l << 12 |
|
||||
segments[i].length << 2 |
|
||||
type
|
||||
)
|
||||
|
||||
/* Add block as token */
|
||||
tokens.push(new lunr.Token(
|
||||
segments[i].toLowerCase(), {
|
||||
position: block << 20 | table[block].length - 1
|
||||
}
|
||||
))
|
||||
|
||||
/* Keep track of length */
|
||||
l += segments[i].length
|
||||
}
|
||||
return // combine segmenter with other approach!?
|
||||
}
|
||||
}
|
||||
|
||||
/* Add block to table */
|
||||
table[block] ||= []
|
||||
table[block].push(
|
||||
start + index << 12 |
|
||||
until - index << 2 |
|
||||
type
|
||||
)
|
||||
|
||||
/* Add block as token */
|
||||
tokens.push(new lunr.Token(
|
||||
section.slice(index, until).toLowerCase(), {
|
||||
position: block << 20 | table[block].length - 1
|
||||
}
|
||||
))
|
||||
})
|
||||
|
||||
/* Add non-content block to table */
|
||||
} else {
|
||||
table[block] ||= []
|
||||
table[block].push(
|
||||
start << 12 |
|
||||
end - start << 2 |
|
||||
type
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/* Return tokens */
|
||||
return tokens
|
||||
}
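
A usage sketch, assuming `lunr` is loaded with its default separator and that `lunr.tokenizer.table` has been initialized by the caller, as the field extractor in the `Search` class does:

lunr.tokenizer.table = []
const tokens = tokenize("<h1>Hello World</h1>")
tokens.map(String)   /* -> ["hello", "world"] - HTML tags are skipped     */
                     /* each token's metadata carries a packed `position` */
                     /* that points back into `lunr.tokenizer.table`      */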
|
@ -1,5 +1,6 @@
|
||||
{
|
||||
"rules": {
|
||||
"no-control-regex": "off",
|
||||
"@typescript-eslint/no-explicit-any": "off"
|
||||
}
|
||||
}
|
@ -20,6 +20,9 @@
|
||||
* IN THE SOFTWARE.
|
||||
*/
|
||||
|
||||
import { split } from "../../internal"
|
||||
import { transform } from "../transform"
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Types
|
||||
* ------------------------------------------------------------------------- */
|
||||
@ -43,9 +46,54 @@ export type SearchQueryTerms = Record<string, boolean>
|
||||
* Functions
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Transform search query
|
||||
*
|
||||
* This function lexes the given search query and applies the transformation
|
||||
* function to each term, preserving markup like `+` and `-` modifiers.
|
||||
*
|
||||
* @param query - Search query
|
||||
*
|
||||
* @returns Search query
|
||||
*/
|
||||
export function transformSearchQuery(
|
||||
query: string
|
||||
): string {
|
||||
|
||||
/* Split query terms with tokenizer */
|
||||
return transform(query, part => {
|
||||
const terms: string[] = []
|
||||
|
||||
/* Initialize lexer and analyze part */
|
||||
const lexer = new lunr.QueryLexer(part)
|
||||
lexer.run()
|
||||
|
||||
/* Extract and tokenize term from lexeme */
|
||||
for (const { type, str: term, start, end } of lexer.lexemes)
|
||||
if (type === "TERM")
|
||||
split(term, lunr.tokenizer.separator, (...range) => {
|
||||
terms.push([
|
||||
part.slice(0, start),
|
||||
term.slice(...range),
|
||||
part.slice(end)
|
||||
].join(""))
|
||||
})
|
||||
|
||||
/* Return terms */
|
||||
return terms
|
||||
})
|
||||
}
|
||||
|
||||
/* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Parse a search query for analysis
|
||||
*
|
||||
* Lunr.js itself has a bug where it doesn't detect or remove wildcards for
|
||||
* query clauses, so we must do this here.
|
||||
*
|
||||
* @see https://bit.ly/3DpTGtz - GitHub issue
|
||||
*
|
||||
* @param value - Query value
|
||||
*
|
||||
* @returns Search query clauses
|
||||
@ -53,11 +101,28 @@ export type SearchQueryTerms = Record<string, boolean>
|
||||
export function parseSearchQuery(
|
||||
value: string
|
||||
): SearchQueryClause[] {
|
||||
const query = new (lunr as any).Query(["title", "text"])
|
||||
const parser = new (lunr as any).QueryParser(value, query)
|
||||
const query = new lunr.Query(["title", "text", "tags"])
|
||||
const parser = new lunr.QueryParser(value, query)
|
||||
|
||||
/* Parse and return query clauses */
|
||||
/* Parse Search query */
|
||||
parser.parse()
|
||||
for (const clause of query.clauses) {
|
||||
clause.usePipeline = true
|
||||
|
||||
/* Handle leading wildcard */
|
||||
if (clause.term.startsWith("*")) {
|
||||
clause.wildcard = lunr.Query.wildcard.LEADING
|
||||
clause.term = clause.term.slice(1)
|
||||
}
|
||||
|
||||
/* Handle trailing wildcard */
|
||||
if (clause.term.endsWith("*")) {
|
||||
clause.wildcard = lunr.Query.wildcard.TRAILING
|
||||
clause.term = clause.term.slice(0, -1)
|
||||
}
|
||||
}
|
||||
|
||||
/* Return query clauses */
|
||||
return query.clauses
|
||||
}
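
A sketch of the wildcard normalization, given that Lunr.js leaves the `*` in the parsed term (the bug referenced above):

const clauses = parseSearchQuery("search termin*")
/* clauses[0].term === "search" - untouched                                       */
/* clauses[1].term === "termin" - with wildcard === lunr.Query.wildcard.TRAILING  */
/* both clauses have usePipeline set to true                                      */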
|
||||
|
||||
@ -85,7 +150,7 @@ export function getSearchQueryTerms(
|
||||
|
||||
/* Annotate unmatched non-stopword query clauses */
|
||||
for (const clause of clauses)
|
||||
if (lunr.stopWordFilter?.(clause.term as any))
|
||||
if (lunr.stopWordFilter?.(clause.term))
|
||||
result[clause.term] = false
|
||||
|
||||
/* Return query terms */
|
||||
|
@ -1,5 +0,0 @@
|
||||
{
|
||||
"rules": {
|
||||
"no-control-regex": "off"
|
||||
}
|
||||
}
|
@ -21,17 +21,19 @@
|
||||
*/
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Types
|
||||
* Helper types
|
||||
* ------------------------------------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Search transformation function
|
||||
* Visitor function
|
||||
*
|
||||
* @param value - Query value
|
||||
* @param value - String value
|
||||
*
|
||||
* @returns Transformed query value
|
||||
* @returns String term(s)
|
||||
*/
|
||||
export type SearchTransformFn = (value: string) => string
|
||||
type VisitorFn = (
|
||||
value: string
|
||||
) => string | string[]
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Functions
|
||||
@ -40,32 +42,55 @@ export type SearchTransformFn = (value: string) => string
|
||||
/**
|
||||
* Default transformation function
|
||||
*
|
||||
* 1. Search for terms in quotation marks and prepend a `+` modifier to denote
|
||||
* that the resulting document must contain all terms, converting the query
|
||||
* to an `AND` query (as opposed to the default `OR` behavior). While users
|
||||
* may expect terms enclosed in quotation marks to map to span queries, i.e.
|
||||
* for which order is important, Lunr.js doesn't support them, so the best
|
||||
* we can do is to convert the terms to an `AND` query.
|
||||
* 1. Trim excess whitespace from left and right.
|
||||
*
|
||||
* 2. Replace control characters which are not located at the beginning of the
|
||||
* 2. Search for parts in quotation marks and prepend a `+` modifier to denote
|
||||
* that the resulting document must contain all parts, converting the query
|
||||
* to an `AND` query (as opposed to the default `OR` behavior). While users
|
||||
* may expect parts enclosed in quotation marks to map to span queries, i.e.
|
||||
* for which order is important, Lunr.js doesn't support them, so the best
|
||||
* we can do is to convert the parts to an `AND` query.
|
||||
*
|
||||
* 3. Replace control characters which are not located at the beginning of the
|
||||
* query or preceded by white space, or are not followed by a non-whitespace
|
||||
* character or are at the end of the query string. Furthermore, filter
|
||||
* unmatched quotation marks.
|
||||
*
|
||||
* 3. Trim excess whitespace from left and right.
|
||||
* 4. Split the query string at whitespace, then pass each part to the visitor
|
||||
* function for tokenization, and append a wildcard to every resulting term
|
||||
* that is not explicitly marked with a `+`, `-`, `~` or `^` modifier, since
|
||||
* it ensures consistent and stable ranking when multiple terms are entered.
|
||||
* Also, if a fuzzy or boost modifier are given, but no numeric value has
|
||||
* been entered, default to 1 to not induce a query error.
|
||||
*
|
||||
* @param query - Query value
|
||||
* @param fn - Visitor function
|
||||
*
|
||||
* @returns Transformed query value
|
||||
*/
|
||||
export function defaultTransform(query: string): string {
|
||||
export function transform(
|
||||
query: string, fn: VisitorFn = term => term
|
||||
): string {
|
||||
return query
|
||||
.split(/"([^"]+)"/g) /* => 1 */
|
||||
.map((terms, index) => index & 1
|
||||
? terms.replace(/^\b|^(?![^\x00-\x7F]|$)|\s+/g, " +")
|
||||
: terms
|
||||
|
||||
/* => 1 */
|
||||
.trim()
|
||||
|
||||
/* => 2 */
|
||||
.split(/"([^"]+)"/g)
|
||||
.map((parts, index) => index & 1
|
||||
? parts.replace(/^\b|^(?![^\x00-\x7F]|$)|\s+/g, " +")
|
||||
: parts
|
||||
)
|
||||
.join("")
|
||||
.replace(/"|(?:^|\s+)[*+\-:^~]+(?=\s+|$)/g, "") /* => 2 */
|
||||
.trim() /* => 3 */
|
||||
|
||||
/* => 3 */
|
||||
.replace(/"|(?:^|\s+)[*+\-:^~]+(?=\s+|$)/g, "")
|
||||
|
||||
/* => 4 */
|
||||
.split(/\s+/g)
|
||||
.flatMap(fn)
|
||||
.map(term => /([~^]$)/.test(term) ? `${term}1` : term)
|
||||
.map(term => /(^[+-]|[~^]\d+$)/.test(term) ? term : `${term}*`)
|
||||
.join(" ")
|
||||
}
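
Worked examples with the default identity visitor; inputs are illustrative:

transform("foo bar")    /* -> "foo* bar*"  - wildcards appended for stable ranking          */
transform("foo~ +bar")  /* -> "foo~1 +bar" - missing fuzziness defaults to 1, modifiers kept */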
|
||||
|
@ -23,73 +23,21 @@
|
||||
import {
|
||||
ObservableInput,
|
||||
Subject,
|
||||
from,
|
||||
map,
|
||||
share
|
||||
first,
|
||||
merge,
|
||||
of,
|
||||
switchMap
|
||||
} from "rxjs"
|
||||
|
||||
import { configuration, feature, translation } from "~/_"
|
||||
import { WorkerHandler, watchWorker } from "~/browser"
|
||||
import { feature } from "~/_"
|
||||
import { watchToggle, watchWorker } from "~/browser"
|
||||
|
||||
import { SearchIndex } from "../../_"
|
||||
import {
  SearchOptions,
  SearchPipeline
} from "../../options"
import { SearchIndex } from "../../config"
import {
  SearchMessage,
  SearchMessageType,
  SearchSetupMessage,
  isSearchResultMessage
  SearchMessageType
} from "../message"

/* ----------------------------------------------------------------------------
 * Types
 * ------------------------------------------------------------------------- */

/**
 * Search worker
 */
export type SearchWorker = WorkerHandler<SearchMessage>

/* ----------------------------------------------------------------------------
 * Helper functions
 * ------------------------------------------------------------------------- */

/**
 * Set up search index
 *
 * @param data - Search index
 *
 * @returns Search index
 */
function setupSearchIndex({ config, docs }: SearchIndex): SearchIndex {

  /* Override default language with value from translation */
  if (config.lang.length === 1 && config.lang[0] === "en")
    config.lang = [
      translation("search.config.lang")
    ]

  /* Override default separator with value from translation */
  if (config.separator === "[\\s\\-]+")
    config.separator = translation("search.config.separator")

  /* Set pipeline from translation */
  const pipeline = translation("search.config.pipeline")
    .split(/\s*,\s*/)
    .filter(Boolean) as SearchPipeline

  /* Determine search options */
  const options: SearchOptions = {
    pipeline,
    suggestions: feature("search.suggest")
  }

  /* Return search index after defaulting */
  return { config, docs, options }
}

/* ----------------------------------------------------------------------------
 * Functions
 * ------------------------------------------------------------------------- */
@ -97,46 +45,51 @@ function setupSearchIndex({ config, docs }: SearchIndex): SearchIndex {
/**
 * Set up search worker
 *
 * This function creates a web worker to set up and query the search index,
 * which is done using Lunr.js. The index must be passed as an observable to
 * enable hacks like _localsearch_ via search index embedding as JSON.
 * This function creates and initializes a web worker that is used for search,
 * so that the user interface doesn't freeze. In general, the application does
 * not care how search is implemented, as long as the web worker conforms to
 * the format expected by the application as defined in `SearchMessage`. This
 * allows the author to implement custom search functionality, by providing a
 * custom web worker via configuration.
 *
 * Material for MkDocs' built-in search implementation makes use of Lunr.js, an
 * efficient and fast implementation for client-side search. Leveraging a tiny
 * iframe-based web worker shim, search is even supported for the `file://`
 * protocol, enabling search for local non-hosted builds.
 *
 * If the protocol is `file://`, search initialization is deferred to mitigate
 * freezing, as it's now synchronous by design - see https://bit.ly/3C521EO
 *
 * @see https://bit.ly/3igvtQv - How to implement custom search
 *
 * @param url - Worker URL
 * @param index - Search index observable input
 * @param index$ - Search index observable input
 *
 * @returns Search worker
 */
export function setupSearchWorker(
  url: string, index: ObservableInput<SearchIndex>
): SearchWorker {
  const config = configuration()
  const worker = new Worker(url)

  /* Create communication channels and resolve relative links */
  const tx$ = new Subject<SearchMessage>()
  const rx$ = watchWorker(worker, { tx$ })
  url: string, index$: ObservableInput<SearchIndex>
): Subject<SearchMessage> {
  const worker$ = watchWorker<SearchMessage>(url)
  merge(
    of(location.protocol !== "file:"),
    watchToggle("search")
  )
    .pipe(
      map(message => {
        if (isSearchResultMessage(message)) {
          for (const result of message.data.items)
            for (const document of result)
              document.location = `${new URL(document.location, config.base)}`
        }
        return message
      }),
      share()
      first(active => active),
      switchMap(() => index$)
    )

  /* Set up search index */
  from(index)
    .pipe(
      map(data => ({
    .subscribe(({ config, docs }) => worker$.next({
      type: SearchMessageType.SETUP,
      data: setupSearchIndex(data)
      } as SearchSetupMessage))
    )
    .subscribe(tx$.next.bind(tx$))
      data: {
        config,
        docs,
        options: {
          suggest: feature("search.suggest")
        }
      }
    }))

  /* Return search worker */
  return { tx$, rx$ }
  return worker$
}
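A rough sketch of how the reworked worker subject might be consumed, assuming the `SearchMessageType` enum and `isSearchResultMessage` guard shown in this diff; the worker URL and `index$` source are placeholders, not part of the commit:

```ts
import { filter } from "rxjs"

// Hypothetical consumer of the new Subject-based API: results arrive as
// RESULT messages on the same subject that queries are sent on.
const worker$ = setupSearchWorker("assets/javascripts/workers/search.js", index$)

worker$
  .pipe(filter(isSearchResultMessage))
  .subscribe(({ data }) => {
    console.log(data.items) // groups of matching documents
  })

// Queries are plain QUERY messages carrying the query string
worker$.next({
  type: SearchMessageType.QUERY,
  data: "hello world"
})
```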
@ -1,5 +1,6 @@
{
  "rules": {
    "no-console": "off",
    "@typescript-eslint/no-misused-promises": "off"
  }
}
@ -22,9 +22,11 @@

import lunr from "lunr"

import { getElement } from "~/browser/element/_"
import "~/polyfills"

import { Search, SearchIndexConfig } from "../../_"
import { Search } from "../../_"
import { SearchConfig } from "../../config"
import {
  SearchMessage,
  SearchMessageType
@ -35,14 +37,18 @@ import {
 * ------------------------------------------------------------------------- */

/**
 * Add support for usage with `iframe-worker` polyfill
 * Add support for `iframe-worker` shim
 *
 * While `importScripts` is synchronous when executed inside of a web worker,
 * it's not possible to provide a synchronous polyfilled implementation. The
 * cool thing is that awaiting a non-Promise is a noop, so extending the type
 * definition to return a `Promise` shouldn't break anything.
 * it's not possible to provide a synchronous shim implementation. The cool
 * thing is that awaiting a non-Promise will convert it into a Promise, so
 * extending the type definition to return a `Promise` shouldn't break anything.
 *
 * @see https://bit.ly/2PjDnXi - GitHub comment
 *
 * @param urls - Scripts to load
 *
 * @returns Promise resolving with no result
 */
declare global {
  function importScripts(...urls: string[]): Promise<void> | void
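A minimal sketch of the call-site pattern the widened `importScripts` type enables; the stemmer path is illustrative only and not taken from this diff:

```ts
// Because the declaration now returns `Promise<void> | void`, call sites can
// always `await`: the native worker implementation resolves synchronously
// (awaiting a non-Promise), while the iframe-worker shim returns a Promise.
async function loadStemmer(base: string, lang: string): Promise<void> {
  await importScripts(`${base}/min/lunr.${lang}.min.js`)
}
```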
@ -65,25 +71,25 @@ let index: Search
 * Fetch (= import) multi-language support through `lunr-languages`
 *
 * This function automatically imports the stemmers necessary to process the
 * languages, which are defined through the search index configuration.
 * languages which are defined as part of the search configuration.
 *
 * If the worker runs inside of an `iframe` (when using `iframe-worker` as
 * a shim), the base URL for the stemmers to be loaded must be determined by
 * searching for the first `script` element with a `src` attribute, which will
 * contain the contents of this script.
 *
 * @param config - Search index configuration
 * @param config - Search configuration
 *
 * @returns Promise resolving with no result
 */
async function setupSearchLanguages(
  config: SearchIndexConfig
  config: SearchConfig
): Promise<void> {
  let base = "../lunr"

  /* Detect `iframe-worker` and fix base URL */
  if (typeof parent !== "undefined" && "IFrameWorker" in parent) {
    const worker = document.querySelector<HTMLScriptElement>("script[src]")!
    const worker = getElement<HTMLScriptElement>("script[src]")!
    const [path] = worker.src.split("/worker")

    /* Prefix base with path */
@ -150,9 +156,21 @@ export async function handler(

    /* Search query message */
    case SearchMessageType.QUERY:
      return {
        type: SearchMessageType.RESULT,
        data: index ? index.search(message.data) : { items: [] }
      const query = message.data
      try {
        return {
          type: SearchMessageType.RESULT,
          data: index.search(query)
        }

      /* Return empty result in case of error */
      } catch (err) {
        console.warn(`Invalid query: ${query} – see https://bit.ly/2s3ChXG`)
        console.warn(err)
        return {
          type: SearchMessageType.RESULT,
          data: { items: [] }
        }
      }

    /* All other messages */
@ -165,7 +183,7 @@ export async function handler(
 * Worker
 * ------------------------------------------------------------------------- */

/* @ts-expect-error - expose Lunr.js in global scope, or stemmers won't work */
/* Expose Lunr.js in global scope, or stemmers won't work */
self.lunr = lunr

/* Handle messages */
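A hedged illustration of the QUERY/RESULT exchange the handler above implements; the worker file name is a placeholder, and the message shapes follow this diff:

```ts
// A malformed Lunr query is caught inside the worker and answered with an
// empty item list, so the consumer never has to handle a thrown error.
const worker = new Worker("search.worker.js")

worker.addEventListener("message", ({ data }: MessageEvent<SearchMessage>) => {
  if (data.type === SearchMessageType.RESULT)
    console.log(data.data.items) // [] when the query could not be parsed
})

worker.postMessage({
  type: SearchMessageType.QUERY,
  data: "title:foo^10 bar" // Lunr query syntax
})
```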
@ -20,7 +20,8 @@
 * IN THE SOFTWARE.
 */

import { SearchIndex, SearchResult } from "../../_"
import { SearchResult } from "../../_"
import { SearchIndex } from "../../config"

/* ----------------------------------------------------------------------------
 * Types
@ -84,19 +85,6 @@ export type SearchMessage =
 * Functions
 * ------------------------------------------------------------------------- */

/**
 * Type guard for search setup messages
 *
 * @param message - Search worker message
 *
 * @returns Test result
 */
export function isSearchSetupMessage(
  message: SearchMessage
): message is SearchSetupMessage {
  return message.type === SearchMessageType.SETUP
}

/**
 * Type guard for search ready messages
 *
@ -110,19 +98,6 @@ export function isSearchReadyMessage(
  return message.type === SearchMessageType.READY
}

/**
 * Type guard for search query messages
 *
 * @param message - Search worker message
 *
 * @returns Test result
 */
export function isSearchQueryMessage(
  message: SearchMessage
): message is SearchQueryMessage {
  return message.type === SearchMessageType.QUERY
}

/**
 * Type guard for search result messages
 *
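A short sketch of how the type guards that remain after this change narrow the `SearchMessage` union; the `worker$` subscription source is hypothetical and only illustrates the pattern:

```ts
worker$.subscribe(message => {
  if (isSearchReadyMessage(message)) {
    // narrowed to the READY message type
    console.log("search index ready")
  } else if (isSearchResultMessage(message)) {
    // narrowed to the RESULT message type, so `data` is a SearchResult
    console.log(message.data.items.length, "result groups")
  }
})
```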
@ -23,12 +23,8 @@
import { ComponentChild } from "preact"

import { configuration, feature, translation } from "~/_"
import {
  SearchDocument,
  SearchMetadata,
  SearchResultItem
} from "~/integrations/search"
import { h, truncate } from "~/utilities"
import { SearchItem } from "~/integrations/search"
import { h } from "~/utilities"

/* ----------------------------------------------------------------------------
 * Helper types
@ -55,7 +51,7 @@ const enum Flag {
 * @returns Element
 */
function renderSearchDocument(
  document: SearchDocument & SearchMetadata, flag: Flag
  document: SearchItem, flag: Flag
): HTMLElement {
  const parent = flag & Flag.PARENT
  const teaser = flag & Flag.TEASER
@ -69,7 +65,8 @@ function renderSearchDocument(
    .slice(0, -1)

  /* Assemble query string for highlighting */
  const url = new URL(document.location)
  const config = configuration()
  const url = new URL(document.location, config.base)
  if (feature("search.highlight"))
    url.searchParams.set("h", Object.entries(document.terms)
      .filter(([, match]) => match)
@ -81,34 +78,25 @@ function renderSearchDocument(
  return (
    <a href={`${url}`} class="md-search-result__link" tabIndex={-1}>
      <article
        class={["md-search-result__article", ...parent
          ? ["md-search-result__article--document"]
          : []
        ].join(" ")}
        class="md-search-result__article md-typeset"
        data-md-score={document.score.toFixed(2)}
      >
        {parent > 0 && <div class="md-search-result__icon md-icon"></div>}
        <h1 class="md-search-result__title">{document.title}</h1>
        {parent > 0 && <h1>{document.title}</h1>}
        {parent <= 0 && <h2>{document.title}</h2>}
        {teaser > 0 && document.text.length > 0 &&
          <p class="md-search-result__teaser">
            {truncate(document.text, 320)}
          </p>
          document.text
        }
        {document.tags && (
          <div class="md-typeset">
            {document.tags.map(tag => {
              const id = tag.replace(/<[^>]+>/g, "")
              const type = tags
                ? id in tags
                  ? `md-tag-icon md-tag-icon--${tags[id]}`
                  : "md-tag-icon"
                : ""
              return (
                <span class={`md-tag ${type}`}>{tag}</span>
              )
            })}
          </div>
        )}
        {document.tags && document.tags.map(tag => {
          const type = tags
            ? tag in tags
              ? `md-tag-icon md-tag-icon--${tags[tag]}`
              : "md-tag-icon"
            : ""
          return (
            <span class={`md-tag ${type}`}>{tag}</span>
          )
        })}
        {teaser > 0 && missing.length > 0 &&
          <p class="md-search-result__terms">
            {translation("search.result.term.missing")}: {...missing}
@ -131,13 +119,18 @@ function renderSearchDocument(
 * @returns Element
 */
export function renderSearchResultItem(
  result: SearchResultItem
  result: SearchItem[]
): HTMLElement {
  const threshold = result[0].score
  const docs = [...result]

  const config = configuration()

  /* Find and extract parent article */
  const parent = docs.findIndex(doc => !doc.location.includes("#"))
  const parent = docs.findIndex(doc => {
    const l = `${new URL(doc.location, config.base)}` // @todo hacky
    return !l.includes("#")
  })
  const [article] = docs.splice(parent, 1)

  /* Determine last index above threshold */
@ -156,10 +149,12 @@ export function renderSearchResultItem(
    ...more.length ? [
      <details class="md-search-result__more">
        <summary tabIndex={-1}>
          {more.length > 0 && more.length === 1
            ? translation("search.result.more.one")
            : translation("search.result.more.other", more.length)
          }
          <div>
            {more.length > 0 && more.length === 1
              ? translation("search.result.more.one")
              : translation("search.result.more.other", more.length)
            }
          </div>
        </summary>
        {...more.map(section => renderSearchDocument(section, Flag.TEASER))}
      </details>
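A hypothetical call site for the reworked `renderSearchResultItem`, which now takes a group of `SearchItem`s; the list selector and the `data.items` source are placeholders consistent with the messages shown earlier in this diff:

```ts
// Append one rendered <a>/<article> block per result group to the result list
const list = document.querySelector<HTMLElement>(".md-search-result__list")!
for (const group of data.items)           // group: SearchItem[]
  list.appendChild(renderSearchResultItem(group))
```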
@ -1,5 +1,5 @@
/*
 * Copyright (c) 2016-2021 Martin Donath <martin.donath@squidfunk.com>
 * Copyright (c) 2016-2022 Martin Donath <martin.donath@squidfunk.com>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
@ -38,6 +38,7 @@ type Attributes =
 * Child element
 */
type Child =
  | ChildNode
  | HTMLElement
  | Text
  | string
@ -21,4 +21,4 @@
 */

export * from "./h"
export * from "./string"
export * from "./round"
@ -24,28 +24,6 @@
 * Functions
 * ------------------------------------------------------------------------- */

/**
 * Truncate a string after the given number of characters
 *
 * This is not a very reasonable approach, since the summaries kind of suck.
 * It would be better to create something more intelligent, highlighting the
 * search occurrences and making a better summary out of it, but this note was
 * written three years ago, so who knows if we'll ever fix it.
 *
 * @param value - Value to be truncated
 * @param n - Number of characters
 *
 * @returns Truncated value
 */
export function truncate(value: string, n: number): string {
  let i = n
  if (value.length > i) {
    while (value[i] !== " " && --i > 0) { /* keep eating */ }
    return `${value.substring(0, i)}...`
  }
  return value
}
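For reference, the behaviour of this removed helper: it cuts back to the last space before the character limit and appends an ellipsis, or returns the value untouched when it already fits.

```ts
truncate("the quick brown fox", 12)  // "the quick..." - cut at the last space before index 12
truncate("short", 12)                // "short" - returned unchanged
```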
/**
 * Round a number for display with repository facts
 *
@ -41,26 +41,26 @@
@import "main/icons";
@import "main/typeset";

@import "main/layout/banner";
@import "main/layout/base";
@import "main/layout/clipboard";
@import "main/layout/consent";
@import "main/layout/content";
@import "main/layout/dialog";
@import "main/layout/feedback";
@import "main/layout/footer";
@import "main/layout/form";
@import "main/layout/header";
@import "main/layout/nav";
@import "main/layout/search";
@import "main/layout/select";
@import "main/layout/sidebar";
@import "main/layout/source";
@import "main/layout/tabs";
@import "main/layout/tag";
@import "main/layout/tooltip";
@import "main/layout/top";
@import "main/layout/version";
@import "main/components/banner";
@import "main/components/base";
@import "main/components/clipboard";
@import "main/components/consent";
@import "main/components/content";
@import "main/components/dialog";
@import "main/components/feedback";
@import "main/components/footer";
@import "main/components/form";
@import "main/components/header";
@import "main/components/nav";
@import "main/components/search";
@import "main/components/select";
@import "main/components/sidebar";
@import "main/components/source";
@import "main/components/tabs";
@import "main/components/tag";
@import "main/components/tooltip";
@import "main/components/top";
@import "main/components/version";

@import "main/extensions/markdown/admonition";
@import "main/extensions/markdown/footnotes";
@ -42,7 +42,8 @@ body {

// Define default fonts
body,
input {
input,
aside {
  color: var(--md-typeset-color);
  font-feature-settings: "kern", "liga";
  font-family: var(--md-text-font-family);
@ -52,7 +53,6 @@ input {
code,
pre,
kbd {
  color: var(--md-typeset-color);
  font-feature-settings: "kern";
  font-family: var(--md-code-font-family);
}
@ -277,10 +277,14 @@
    text-overflow: clip;

    // Search icon and placeholder
    + .md-search__icon,
    &::placeholder {
    + .md-search__icon {
      color: var(--md-default-fg-color--light);
    }

    // Search placeholder
    &::placeholder {
      color: transparent;
    }
    }
  }
}
@ -350,7 +354,7 @@
  }

  // Search option buttons
  > * {
  > .md-icon {
    margin-inline-start: px2rem(4px);
    color: var(--md-default-fg-color--light);
    transform: scale(0.75);
@ -365,7 +369,7 @@
    -webkit-tap-highlight-color: transparent;
  }

  // Show reset button when search is active and input non-empty
  // Show buttons when search is active and input non-empty
  [data-md-toggle="search"]:checked ~ .md-header
  .md-search__input:valid ~ & {
    transform: scale(1);
@ -556,31 +560,17 @@
    }
  }

  // Search result more link
  &__more summary {
  // Search result more container
  &__more > summary {
    position: sticky;
    top: 0;
    z-index: 1;
    display: block;
    padding: px2em(12px) px2rem(16px);
    color: var(--md-typeset-a-color);
    font-size: px2rem(12.8px);
    outline: none;
    cursor: pointer;
    transition:
      color 250ms,
      background-color 250ms;
    scroll-snap-align: start;

    // [tablet landscape +]: Adjust spacing
    @include break-from-device(tablet landscape) {
      padding-inline-start: px2rem(44px);
    }

    // Search result more link on focus/hover
    &:is(:focus, :hover) {
      color: var(--md-accent-fg-color);
      background-color: var(--md-accent-fg-color--transparent);
    }

    // Hide native details marker - modern
    // Hide native details marker
    &::marker {
      display: none;
    }
@ -591,10 +581,32 @@
      display: none;
    }

    // Adjust transparency of less relevant results
    ~ * > * {
      opacity: 0.65;
    // Search result more button
    > div {
      padding: px2em(12px) px2rem(16px);
      color: var(--md-typeset-a-color);
      font-size: px2rem(12.8px);
      transition:
        color 250ms,
        background-color 250ms;

      // [tablet landscape +]: Adjust spacing
      @include break-from-device(tablet landscape) {
        padding-inline-start: px2rem(44px);
      }
    }

    // Search result more link on focus/hover
    &:is(:focus, :hover) > div {
      color: var(--md-accent-fg-color);
      background-color: var(--md-accent-fg-color--transparent);
    }
  }

  // Adjust background for more container in open state
  &__more[open] > summary {
    background-color: var(--md-default-bg-color);
    // box-shadow: 0 px2rem(-1px) hsla(0, 0%, 0%, 0.07) inset;
  }

  // Search result article
@ -607,18 +619,6 @@
    @include break-from-device(tablet landscape) {
      padding-inline-start: px2rem(44px);
    }

    // Search result article document
    &--document {

      // Search result title
      .md-search-result__title {
        margin: px2rem(11px) 0;
        font-weight: 400;
        font-size: px2rem(16px);
        line-height: 1.4;
      }
    }
  }

  // Search result icon
@ -654,49 +654,46 @@
    }
  }

  // Search result title
  &__title {
    margin: 0.5em 0;
    font-weight: 700;
    font-size: px2rem(12.8px);
    line-height: 1.6;
  }

  // Search result teaser
  &__teaser {
    display: -webkit-box;
    max-height: px2rem(40px);
    margin: 0.5em 0;
    overflow: hidden;
  // Typesetted content
  .md-typeset {
    color: var(--md-default-fg-color--light);
    font-size: px2rem(12.8px);
    line-height: 1.6;
    text-overflow: ellipsis;
    -webkit-box-orient: vertical;
    -webkit-line-clamp: 2;

    // [mobile -]: Adjust number of lines
    @include break-to-device(mobile) {
      max-height: px2rem(60px);
      -webkit-line-clamp: 3;
    // Search result article title
    h1 {
      margin: px2rem(11px) 0;
      color: var(--md-default-fg-color);
      font-weight: 400;
      font-size: px2rem(16px);
      line-height: 1.4;

      // Search term highlighting
      mark {
        text-decoration: none;
      }
    }

    // [tablet landscape]: Adjust number of lines
    @include break-at-device(tablet landscape) {
      max-height: px2rem(60px);
      -webkit-line-clamp: 3;
    }
    // Search result section title
    h2 {
      margin: 0.5em 0;
      color: var(--md-default-fg-color);
      font-weight: 700;
      font-size: px2rem(12.8px);
      line-height: 1.6;

      // Search term highlighting
      mark {
        text-decoration: underline;
        background-color: transparent;
      // Search term highlighting
      mark {
        text-decoration: none;
      }
    }
  }

  // Search result terms
  &__terms {
    display: block;
    margin: 0.5em 0;
    color: var(--md-default-fg-color);
    font-size: px2rem(12.8px);
    font-style: italic;
  }
@ -704,6 +701,7 @@
  // Search term highlighting
  mark {
    color: var(--md-accent-fg-color);
    text-decoration: underline;
    background-color: transparent;
  }
}