Update Documentation for developer
@@ -41,11 +41,12 @@ if (typeof Scorer === "undefined") {
 }
 
 // Global search result kind enum, used by themes to style search results.
+// prettier-ignore
 class SearchResultKind {
-  static get index() { return "index"; }
-  static get object() { return "object"; }
-  static get text() { return "text"; }
-  static get title() { return "title"; }
+  static get index() { return "index"; }
+  static get object() { return "object"; }
+  static get text() { return "text"; }
+  static get title() { return "title"; }
 }
 
 const _removeChildren = (element) => {
@@ -58,6 +59,15 @@ const _removeChildren = (element) => {
 const _escapeRegExp = (string) =>
   string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
 
+const _escapeHTML = (text) => {
+  return text
+    .replaceAll("&", "&amp;")
+    .replaceAll("<", "&lt;")
+    .replaceAll(">", "&gt;")
+    .replaceAll('"', "&quot;")
+    .replaceAll("'", "&#39;");
+};
+
 const _displayItem = (item, searchTerms, highlightTerms) => {
   const docBuilder = DOCUMENTATION_OPTIONS.BUILDER;
   const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX;
@@ -90,25 +100,30 @@ const _displayItem = (item, searchTerms, highlightTerms) => {
   let linkEl = listItem.appendChild(document.createElement("a"));
   linkEl.href = linkUrl + anchor;
   linkEl.dataset.score = score;
-  linkEl.innerHTML = title;
+  linkEl.innerHTML = _escapeHTML(title);
   if (descr) {
     listItem.appendChild(document.createElement("span")).innerHTML =
-      " (" + descr + ")";
+      ` (${_escapeHTML(descr)})`;
     // highlight search terms in the description
-    if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js
-      highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted"));
-  }
-  else if (showSearchSummary)
+    if (SPHINX_HIGHLIGHT_ENABLED)
+      // SPHINX_HIGHLIGHT_ENABLED is set in sphinx_highlight.js
+      highlightTerms.forEach((term) =>
+        _highlightText(listItem, term, "highlighted"),
+      );
+  } else if (showSearchSummary)
     fetch(requestUrl)
       .then((responseData) => responseData.text())
       .then((data) => {
         if (data)
           listItem.appendChild(
-            Search.makeSearchSummary(data, searchTerms, anchor)
+            Search.makeSearchSummary(data, searchTerms, anchor),
           );
         // highlight search terms in the summary
-        if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js
-          highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted"));
+        if (SPHINX_HIGHLIGHT_ENABLED)
+          // SPHINX_HIGHLIGHT_ENABLED is set in sphinx_highlight.js
+          highlightTerms.forEach((term) =>
+            _highlightText(listItem, term, "highlighted"),
+          );
       });
   Search.output.appendChild(listItem);
 };
@@ -117,14 +132,14 @@ const _finishSearch = (resultCount) => {
   Search.title.innerText = _("Search Results");
   if (!resultCount)
     Search.status.innerText = Documentation.gettext(
-      "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories."
+      "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories.",
     );
   else
     Search.status.innerText = Documentation.ngettext(
       "Search finished, found one page matching the search query.",
       "Search finished, found ${resultCount} pages matching the search query.",
       resultCount,
-    ).replace('${resultCount}', resultCount);
+    ).replace("${resultCount}", resultCount);
 };
 const _displayNextItem = (
   results,
@@ -138,7 +153,7 @@ const _displayNextItem = (
     _displayItem(results.pop(), searchTerms, highlightTerms);
     setTimeout(
       () => _displayNextItem(results, resultCount, searchTerms, highlightTerms),
-      5
+      5,
     );
   }
   // search finished, update title and status message
@@ -170,9 +185,10 @@ const _orderResultsByScoreThenName = (a, b) => {
  * This is the same as ``\W+`` in Python, preserving the surrogate pair area.
  */
 if (typeof splitQuery === "undefined") {
-  var splitQuery = (query) => query
+  var splitQuery = (query) =>
+    query
       .split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu)
-      .filter(term => term) // remove remaining empty strings
+      .filter((term) => term); // remove remaining empty strings
 }
 
 /**
@@ -184,16 +200,23 @@ const Search = {
   _pulse_status: -1,
 
   htmlToText: (htmlString, anchor) => {
-    const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html');
+    const htmlElement = new DOMParser().parseFromString(
+      htmlString,
+      "text/html",
+    );
     for (const removalQuery of [".headerlink", "script", "style"]) {
-      htmlElement.querySelectorAll(removalQuery).forEach((el) => { el.remove() });
+      htmlElement.querySelectorAll(removalQuery).forEach((el) => {
+        el.remove();
+      });
     }
     if (anchor) {
-      const anchorContent = htmlElement.querySelector(`[role="main"] ${anchor}`);
+      const anchorContent = htmlElement.querySelector(
+        `[role="main"] ${anchor}`,
+      );
       if (anchorContent) return anchorContent.textContent;
 
       console.warn(
-        `Anchored content block not found. Sphinx search tries to obtain it via DOM query '[role=main] ${anchor}'. Check your theme or template.`
+        `Anchored content block not found. Sphinx search tries to obtain it via DOM query '[role=main] ${anchor}'. Check your theme or template.`,
       );
     }
 
@@ -202,7 +225,7 @@ const Search = {
     if (docContent) return docContent.textContent;
 
     console.warn(
-      "Content block not found. Sphinx search tries to obtain it via DOM query '[role=main]'. Check your theme or template."
+      "Content block not found. Sphinx search tries to obtain it via DOM query '[role=main]'. Check your theme or template.",
     );
     return "";
   },
@@ -287,12 +310,8 @@ const Search = {
       const queryTermLower = queryTerm.toLowerCase();
 
       // maybe skip this "word"
-      // stopwords array is from language_data.js
-      if (
-        stopwords.indexOf(queryTermLower) !== -1 ||
-        queryTerm.match(/^\d+$/)
-      )
-        return;
+      // stopwords set is from language_data.js
+      if (stopwords.has(queryTermLower) || queryTerm.match(/^\d+$/)) return;
 
       // stem the word
       let word = stemmer.stemWord(queryTermLower);
@@ -304,8 +323,12 @@ const Search = {
       }
     });
 
-    if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js
-      localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" "))
+    if (SPHINX_HIGHLIGHT_ENABLED) {
+      // SPHINX_HIGHLIGHT_ENABLED is set in sphinx_highlight.js
+      localStorage.setItem(
+        "sphinx_highlight_terms",
+        [...highlightTerms].join(" "),
+      );
     }
 
     // console.debug("SEARCH: searching for:");
@@ -318,7 +341,13 @@ const Search = {
   /**
    * execute search (requires search index to be loaded)
    */
-  _performSearch: (query, searchTerms, excludedTerms, highlightTerms, objectTerms) => {
+  _performSearch: (
+    query,
+    searchTerms,
+    excludedTerms,
+    highlightTerms,
+    objectTerms,
+  ) => {
     const filenames = Search._index.filenames;
     const docNames = Search._index.docnames;
     const titles = Search._index.titles;
@@ -334,10 +363,15 @@ const Search = {
 
     const queryLower = query.toLowerCase().trim();
     for (const [title, foundTitles] of Object.entries(allTitles)) {
-      if (title.toLowerCase().trim().includes(queryLower) && (queryLower.length >= title.length/2)) {
+      if (
+        title.toLowerCase().trim().includes(queryLower)
+        && queryLower.length >= title.length / 2
+      ) {
         for (const [file, id] of foundTitles) {
-          const score = Math.round(Scorer.title * queryLower.length / title.length);
-          const boost = titles[file] === title ? 1 : 0; // add a boost for document titles
+          const score = Math.round(
+            (Scorer.title * queryLower.length) / title.length,
+          );
+          const boost = titles[file] === title ? 1 : 0; // add a boost for document titles
           normalResults.push([
             docNames[file],
             titles[file] !== title ? `${titles[file]} > ${title}` : title,
@@ -353,9 +387,9 @@ const Search = {
 
     // search for explicit entries in index directives
     for (const [entry, foundEntries] of Object.entries(indexEntries)) {
-      if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) {
+      if (entry.includes(queryLower) && queryLower.length >= entry.length / 2) {
         for (const [file, id, isMain] of foundEntries) {
-          const score = Math.round(100 * queryLower.length / entry.length);
+          const score = Math.round((100 * queryLower.length) / entry.length);
           const result = [
             docNames[file],
             titles[file],
@@ -376,11 +410,13 @@ const Search = {
 
     // lookup as object
     objectTerms.forEach((term) =>
-      normalResults.push(...Search.performObjectSearch(term, objectTerms))
+      normalResults.push(...Search.performObjectSearch(term, objectTerms)),
     );
 
     // lookup as search terms in fulltext
-    normalResults.push(...Search.performTermsSearch(searchTerms, excludedTerms));
+    normalResults.push(
+      ...Search.performTermsSearch(searchTerms, excludedTerms),
+    );
 
     // let the scorer override scores with a custom scoring function
     if (Scorer.score) {
@@ -401,7 +437,11 @@ const Search = {
     // note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept
     let seen = new Set();
     results = results.reverse().reduce((acc, result) => {
-      let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(',');
+      let resultStr = result
+        .slice(0, 4)
+        .concat([result[5]])
+        .map((v) => String(v))
+        .join(",");
       if (!seen.has(resultStr)) {
         acc.push(result);
         seen.add(resultStr);
@@ -413,8 +453,20 @@ const Search = {
   },
 
   query: (query) => {
-    const [searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms] = Search._parseQuery(query);
-    const results = Search._performSearch(searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms);
+    const [
+      searchQuery,
+      searchTerms,
+      excludedTerms,
+      highlightTerms,
+      objectTerms,
+    ] = Search._parseQuery(query);
+    const results = Search._performSearch(
+      searchQuery,
+      searchTerms,
+      excludedTerms,
+      highlightTerms,
+      objectTerms,
+    );
 
     // for debugging
     //Search.lastresults = results.slice(); // a copy
@@ -437,7 +489,7 @@ const Search = {
     const results = [];
 
     const objectSearchCallback = (prefix, match) => {
-      const name = match[4]
+      const name = match[4];
      const fullname = (prefix ? prefix + "." : "") + name;
      const fullnameLower = fullname.toLowerCase();
      if (fullnameLower.indexOf(object) < 0) return;
@@ -489,9 +541,7 @@ const Search = {
       ]);
     };
     Object.keys(objects).forEach((prefix) =>
-      objects[prefix].forEach((array) =>
-        objectSearchCallback(prefix, array)
-      )
+      objects[prefix].forEach((array) => objectSearchCallback(prefix, array)),
     );
     return results;
   },
@@ -516,8 +566,14 @@ const Search = {
       // find documents, if any, containing the query word in their text/title term indices
       // use Object.hasOwnProperty to avoid mismatching against prototype properties
       const arr = [
-        { files: terms.hasOwnProperty(word) ? terms[word] : undefined, score: Scorer.term },
-        { files: titleTerms.hasOwnProperty(word) ? titleTerms[word] : undefined, score: Scorer.title },
+        {
+          files: terms.hasOwnProperty(word) ? terms[word] : undefined,
+          score: Scorer.term,
+        },
+        {
+          files: titleTerms.hasOwnProperty(word) ? titleTerms[word] : undefined,
+          score: Scorer.title,
+        },
       ];
       // add support for partial matches
       if (word.length > 2) {
@@ -558,7 +614,8 @@ const Search = {
       // create the mapping
       files.forEach((file) => {
         if (!fileMap.has(file)) fileMap.set(file, [word]);
-        else if (fileMap.get(file).indexOf(word) === -1) fileMap.get(file).push(word);
+        else if (fileMap.get(file).indexOf(word) === -1)
+          fileMap.get(file).push(word);
       });
     });
 
@@ -569,11 +626,11 @@ const Search = {
 
       // as search terms with length < 3 are discarded
       const filteredTermCount = [...searchTerms].filter(
-        (term) => term.length > 2
+        (term) => term.length > 2,
       ).length;
       if (
-        wordList.length !== searchTerms.size &&
-        wordList.length !== filteredTermCount
+        wordList.length !== searchTerms.size
+        && wordList.length !== filteredTermCount
       )
         continue;
 
@@ -581,10 +638,10 @@ const Search = {
       if (
         [...excludedTerms].some(
           (term) =>
-            terms[term] === file ||
-            titleTerms[term] === file ||
-            (terms[term] || []).includes(file) ||
-            (titleTerms[term] || []).includes(file)
+            terms[term] === file
+            || titleTerms[term] === file
+            || (terms[term] || []).includes(file)
+            || (titleTerms[term] || []).includes(file),
         )
       )
         break;
@@ -626,7 +683,8 @@ const Search = {
 
     let summary = document.createElement("p");
     summary.classList.add("context");
-    summary.textContent = top + text.substr(startWithContext, 240).trim() + tail;
+    summary.textContent =
+      top + text.substr(startWithContext, 240).trim() + tail;
 
     return summary;
   },