Mirror of https://github.com/huhu/rust-search-extension
Synced 2024-11-13 23:27:13 +00:00
Support description shards (#285)
* Load desc shards
* Add searchState compatible js
* Bump std search index
* Feat DescShardManager
* Integrate DocSearchV2
* Store desc shards into storage
* Remove legacy DocSearch
* Fix nightly search index
* Add std desc shards
* Split desc shards setter
* Compatible with librustdoc DocSearch for searchState
* Polish desc shards index format
* Load search.js to get descShards
* Store searchIndex as JSON array format
* Support source mode for std docs search
* Support adding new crate's searchIndex
* Move loadScript to script/lib.js
* Check crate version compatibility in docs.rs
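In short: rustdoc now ships item descriptions in separate "desc shard" files, and the extension persists them per crate under a `desc-shards-{crate}` storage key as [[crate, {shard: descriptions}]] entries, rebuilding a Map on read. A minimal sketch of that round trip (illustrative only; the key names and array layout come from this commit, the values are made up):

    // Write side: IndexSetter.setDescShards(crate, shards)
    await storage.setItem(`desc-shards-${crate}`, [["std", { 0: ["desc of item 0", "desc of item 1"] }]]);

    // Read side: IndexManager.getDescShards(crate) rebuilds a Map, falling back to the bundled std shards
    let entries = await storage.getItem(`desc-shards-${crate}`);
    let descShards = entries ? new Map(entries) : stdDescShards;

    // Lookup side: DescShardManager.loadDesc({ descShard, descIndex })
    let desc = descShards.get("std")[0][1]; // shard 0, description at index 1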
This commit is contained in:
parent
46afbd0e1d
commit
3597c311a0
19 changed files with 3608 additions and 811 deletions
|
@@ -8,5 +8,5 @@ export {
|
|||
storage,
|
||||
settings,
|
||||
IndexSetter,
|
||||
CrateDocManager
|
||||
CrateDocManager,
|
||||
}
|
|
@@ -1 +1 @@
|
|||
var rse=(()=>{var g=Object.defineProperty;var x=Object.getOwnPropertyDescriptor;var p=Object.getOwnPropertyNames;var w=Object.prototype.hasOwnProperty;var h=(t,e)=>{for(var s in e)g(t,s,{get:e[s],enumerable:!0})},y=(t,e,s,i)=>{if(e&&typeof e=="object"||typeof e=="function")for(let r of p(e))!w.call(t,r)&&r!==s&&g(t,r,{get:()=>e[r],enumerable:!(i=x(e,r))||i.enumerable});return t};var C=t=>y(g({},"__esModule",{value:!0}),t);var n=(t,e,s)=>new Promise((i,r)=>{var d=c=>{try{o(s.next(c))}catch(u){r(u)}},l=c=>{try{o(s.throw(c))}catch(u){r(u)}},o=c=>c.done?i(c.value):Promise.resolve(c.value).then(d,l);o((s=s.apply(t,e)).next())});var v={};h(v,{CrateDocManager:()=>I,IndexSetter:()=>m,settings:()=>f,storage:()=>a});var S={getAllItems:()=>new Promise(t=>{chrome.storage.local.get(null,t)}),getItem:t=>new Promise(e=>{chrome.storage.local.get(t,s=>{e(s?s[t]:null)})}),setItem:(t,e)=>new Promise(s=>{chrome.storage.local.set({[t]:e},s)}),removeItem:t=>new Promise(e=>{chrome.storage.local.remove(t,e)})};var a=S;var R={get autoUpdate(){return n(this,null,function*(){return(yield a.getItem("auto-update"))||!1})},set autoUpdate(t){a.setItem("auto-update",t)},get isOfflineMode(){return n(this,null,function*(){return(yield a.getItem("offline-mode"))||!1})},set isOfflineMode(t){a.setItem("offline-mode",t)},get offlineDocPath(){return n(this,null,function*(){return(yield a.getItem("offline-path"))||""})},set offlineDocPath(t){a.setItem("offline-path",t)},get crateRegistry(){return n(this,null,function*(){return(yield a.getItem("crate-registry"))||"crates.io"})},set crateRegistry(t){a.setItem("crate-registry",t)},get defaultSearch(){return n(this,null,function*(){return(yield a.getItem("default-search"))||{thirdPartyDocs:!1,docsRs:!0,attributes:!0}})},set defaultSearch(t){a.setItem("default-search",t)},get showMacroRailroad(){return n(this,null,function*(){let t=yield a.getItem("show-macro-railroad");return t===void 0?!0:t})},set showMacroRailroad(t){a.setItem("show-macro-railroad",t)},get keepCratesUpToDate(){return n(this,null,function*(){let t=yield a.getItem("keep-crates-up-to-date");return t===void 0?!1:t})},set keepCratesUpToDate(t){a.setItem("keep-crates-up-to-date",t)}},f=R;var m=class t{static setStdStableIndex(e){a.setItem("index-std-stable",e)}static setStdNightlyIndex(e){a.setItem("index-std-nightly",e)}static setBookIndex(e){a.setItem("index-book",e)}static setLabelIndex(e){a.setItem("index-label",e)}static setRfcIndex(e){a.setItem("index-rfc",e)}static setCrateMapping(e){a.setItem("index-crate-mapping",e)}static setCrateIndex(e){a.setItem("index-crate",e)}static setLintIndex(e){a.setItem("index-lint",e)}static setCaniuseIndex(e){a.setItem("index-caniuse",e)}static setRustcIndex(e){a.setItem("index-rustc",e)}static setTargetIndex(e){a.setItem("index-target",e)}static setCommandIndex(e){a.setItem("index-command",e)}static updateAllIndex(){t.setBookIndex(booksIndex),t.setCaniuseIndex(caniuseIndex),t.setCommandIndex(commandsIndex),t.setCrateIndex(crateIndex),t.setCrateMapping(mapping),t.setLabelIndex(labelsIndex),t.setLintIndex(lintsIndex),t.setRfcIndex(rfcsIndex),t.setRustcIndex(rustcIndex),t.setStdStableIndex(searchIndex),t.setTargetIndex(targetsIndex)}};var I=class t{static getCrates(){return n(this,null,function*(){return(yield a.getItem("crates"))||{}})}static getCrateByName(e){return n(this,null,function*(){let s=yield t.getCrates();if(s[e])return s[e];{let i=Object.entries(s).find(([r,{crateName:d}])=>d==e);return i?i[1]:null}})}static getCrateSearchIndex(e){return n(this,null,function*(){let 
s=yield a.getItem(`@${e}`);if(s)return s;{let i=yield t.getCrates(),r=Object.entries(i).find(([d,{crateName:l}])=>l==e);if(r){let d=r[0];return yield a.getItem(`@${d}`)}else return null}})}static addCrate(d){return n(this,arguments,function*({libName:e,crateVersion:s,searchIndex:i,crateName:r}){if(i&&e in i){yield a.setItem(`@${e}`,i);let l=i[e].doc,o=yield t.getCrates();e in o?o[e]={version:s,doc:l,time:o[e].time,crateName:r}:o[e]={version:s,doc:l,time:Date.now(),crateName:r},yield a.setItem("crates",o)}})}static removeCrate(e){return n(this,null,function*(){let s=yield t.getCrates();delete s[e],yield a.setItem("crates",s),yield a.removeItem(`@${e}`)})}};return C(v);})();
|
||||
var rse=(()=>{var u=Object.defineProperty;var p=Object.getOwnPropertyDescriptor;var w=Object.getOwnPropertyNames;var h=Object.prototype.hasOwnProperty;var y=(t,e)=>{for(var s in e)u(t,s,{get:e[s],enumerable:!0})},C=(t,e,s,i)=>{if(e&&typeof e=="object"||typeof e=="function")for(let r of w(e))!h.call(t,r)&&r!==s&&u(t,r,{get:()=>e[r],enumerable:!(i=p(e,r))||i.enumerable});return t};var S=t=>C(u({},"__esModule",{value:!0}),t);var n=(t,e,s)=>new Promise((i,r)=>{var c=o=>{try{g(s.next(o))}catch(d){r(d)}},l=o=>{try{g(s.throw(o))}catch(d){r(d)}},g=o=>o.done?i(o.value):Promise.resolve(o.value).then(c,l);g((s=s.apply(t,e)).next())});var b={};y(b,{CrateDocManager:()=>I,IndexSetter:()=>m,settings:()=>x,storage:()=>a});var f=null;chrome&&chrome.storage?f={getAllItems:()=>new Promise(t=>{chrome.storage.local.get(null,t)}),getItem:t=>new Promise(e=>{chrome.storage.local.get(t,s=>{e(s?s[t]:null)})}),setItem:(t,e)=>new Promise(s=>{chrome.storage.local.set({[t]:e},s)}),removeItem:t=>new Promise(e=>{chrome.storage.local.remove(t,e)})}:f={getAllItems:()=>new Promise(t=>{}),getItem:t=>new Promise(e=>{let s=localStorage.getItem(t);if(s)try{s=JSON.parse(s)}catch(i){}e(s)}),setItem:(t,e)=>new Promise(s=>{e instanceof Object&&(e=JSON.stringify(e)),localStorage.setItem(t,e),s()}),removeItem:t=>new Promise(e=>{localStorage.removeItem(t),e()})};var a=f;var P={get autoUpdate(){return n(this,null,function*(){return(yield a.getItem("auto-update"))||!1})},set autoUpdate(t){a.setItem("auto-update",t)},get isOfflineMode(){return n(this,null,function*(){return(yield a.getItem("offline-mode"))||!1})},set isOfflineMode(t){a.setItem("offline-mode",t)},get offlineDocPath(){return n(this,null,function*(){return(yield a.getItem("offline-path"))||""})},set offlineDocPath(t){a.setItem("offline-path",t)},get crateRegistry(){return n(this,null,function*(){return(yield a.getItem("crate-registry"))||"crates.io"})},set crateRegistry(t){a.setItem("crate-registry",t)},get defaultSearch(){return n(this,null,function*(){return(yield a.getItem("default-search"))||{thirdPartyDocs:!1,docsRs:!0,attributes:!0}})},set defaultSearch(t){a.setItem("default-search",t)},get showMacroRailroad(){return n(this,null,function*(){let t=yield a.getItem("show-macro-railroad");return t===void 0?!0:t})},set showMacroRailroad(t){a.setItem("show-macro-railroad",t)},get keepCratesUpToDate(){return n(this,null,function*(){let t=yield a.getItem("keep-crates-up-to-date");return t===void 0?!1:t})},set keepCratesUpToDate(t){a.setItem("keep-crates-up-to-date",t)}},x=P;var m=class t{static setStdStableIndex(e){a.setItem("index-std-stable",e)}static setStdNightlyIndex(e){a.setItem("index-std-nightly",e)}static setDescShards(e,s){s&&a.setItem(`desc-shards-${e}`,s)}static setBookIndex(e){a.setItem("index-book",e)}static setLabelIndex(e){a.setItem("index-label",e)}static setRfcIndex(e){a.setItem("index-rfc",e)}static setCrateMapping(e){a.setItem("index-crate-mapping",e)}static setCrateIndex(e){a.setItem("index-crate",e)}static setLintIndex(e){a.setItem("index-lint",e)}static setCaniuseIndex(e){a.setItem("index-caniuse",e)}static setRustcIndex(e){a.setItem("index-rustc",e)}static setTargetIndex(e){a.setItem("index-target",e)}static setCommandIndex(e){a.setItem("index-command",e)}static 
updateAllIndex(){t.setBookIndex(booksIndex),t.setCaniuseIndex(caniuseIndex),t.setCommandIndex(commandsIndex),t.setCrateIndex(crateIndex),t.setCrateMapping(mapping),t.setLabelIndex(labelsIndex),t.setLintIndex(lintsIndex),t.setRfcIndex(rfcsIndex),t.setRustcIndex(rustcIndex),t.setStdStableIndex(searchIndex),t.setTargetIndex(targetsIndex)}};var I=class t{static getCrates(){return n(this,null,function*(){return(yield a.getItem("crates"))||{}})}static getCrateByName(e){return n(this,null,function*(){let s=yield t.getCrates();if(s[e])return s[e];{let i=Object.entries(s).find(([r,{crateName:c}])=>c==e);return i?i[1]:null}})}static getCrateSearchIndex(e){return n(this,null,function*(){let s=yield a.getItem(`@${e}`);if(s)return s;{let i=yield t.getCrates(),r=Object.entries(i).find(([c,{crateName:l}])=>l==e);if(r){let c=r[0];return new Map(yield a.getItem(`@${c}`))}else return null}})}static addCrate(g){return n(this,arguments,function*({libName:e,crateVersion:s,crateTitle:i,searchIndex:r,crateName:c,descShards:l}){yield a.setItem(`@${e}`,r);let o=i,d=yield t.getCrates();e in d?d[e]={version:s,doc:o,time:d[e].time,crateName:c}:d[e]={version:s,doc:o,time:Date.now(),crateName:c},yield a.setItem("crates",d),m.setDescShards(e,l)})}static removeCrate(e){return n(this,null,function*(){let s=yield t.getCrates();delete s[e],yield a.setItem("crates",s),yield a.removeItem(`@${e}`),yield a.removeItem(`desc-shards-${e}`)})}};return S(b);})();
|
||||
|
|
|
@@ -1,4 +1,5 @@
|
|||
import storage from "./core/storage.js";
|
||||
import IndexSetter from "./index-setter.js";
|
||||
|
||||
export default class CrateDocManager {
|
||||
static async getCrates() {
|
||||
|
@@ -30,7 +31,7 @@ export default class CrateDocManager {
|
|||
let crate = Object.entries(crates).find(([_, { crateName }]) => crateName == name);
|
||||
if (crate) {
|
||||
let libName = crate[0];
|
||||
return await storage.getItem(`@${libName}`);
|
||||
return new Map(await storage.getItem(`@${libName}`));
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
|
@@ -43,20 +44,19 @@
|
|||
//
|
||||
// Here is the rule: https://docs.rs/{crateName}/{crateVersion}/{libName}
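// For example (illustrative values): https://docs.rs/async-std/1.12.0/async_std/ gives
// crateName = "async-std", crateVersion = "1.12.0", libName = "async_std";
// note the hyphenated crate name versus the underscored library name.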
|
||||
//
|
||||
// Ensure `searchIndex` is an Object, not a Map.
|
||||
static async addCrate({ libName, crateVersion, searchIndex, crateName }) {
|
||||
if (searchIndex && libName in searchIndex) {
|
||||
await storage.setItem(`@${libName}`, searchIndex);
|
||||
let doc = searchIndex[libName]["doc"];
|
||||
let crates = await CrateDocManager.getCrates();
|
||||
if (libName in crates) {
|
||||
// Don't override the time if the crate exists
|
||||
crates[libName] = { version: crateVersion, doc, time: crates[libName].time, crateName };
|
||||
} else {
|
||||
crates[libName] = { version: crateVersion, doc, time: Date.now(), crateName };
|
||||
}
|
||||
await storage.setItem("crates", crates);
|
||||
// The caller should ensure `searchIndex` is a Map, not an Object.
|
||||
static async addCrate({ libName, crateVersion, crateTitle, searchIndex, crateName, descShards }) {
|
||||
await storage.setItem(`@${libName}`, searchIndex);
|
||||
let doc = crateTitle;
|
||||
let crates = await CrateDocManager.getCrates();
|
||||
if (libName in crates) {
|
||||
// Don't override the time if the crate exists
|
||||
crates[libName] = { version: crateVersion, doc, time: crates[libName].time, crateName };
|
||||
} else {
|
||||
crates[libName] = { version: crateVersion, doc, time: Date.now(), crateName };
|
||||
}
|
||||
await storage.setItem("crates", crates);
|
||||
IndexSetter.setDescShards(libName, descShards);
|
||||
}
|
||||
|
||||
static async removeCrate(name) {
|
||||
|
@@ -64,5 +64,6 @@ export default class CrateDocManager {
|
|||
delete crates[name];
|
||||
await storage.setItem("crates", crates);
|
||||
await storage.removeItem(`@${name}`);
|
||||
await storage.removeItem(`desc-shards-${name}`);
|
||||
}
|
||||
};
|
|
@@ -7,6 +7,7 @@ import rfcsIndex from "./index/rfcs.js";
|
|||
import rustcIndex from "./index/rustc.js";
|
||||
import targetsIndex from "./index/targets.js";
|
||||
import searchIndex from "./index/std-docs.js";
|
||||
import stdDescShards from "./index/desc-shards/std.js";
|
||||
import { mapping, crateIndex } from "./index/crates.js";
|
||||
import storage from "./core/storage.js";
|
||||
import IndexSetter from "./index-setter.js";
|
||||
|
@@ -18,13 +19,32 @@ import IndexSetter from "./index-setter.js";
|
|||
|
||||
export default class IndexManager extends IndexSetter {
|
||||
static async getStdStableIndex() {
|
||||
// Convert default map searchIndex to Object since rust 1.76.0
|
||||
return await storage.getItem('index-std-stable') || Object.fromEntries(searchIndex);
|
||||
let index = await storage.getItem('index-std-stable');
|
||||
if (index?.length > 0) {
|
||||
return new Map(index);
|
||||
} else {
|
||||
return searchIndex;
|
||||
}
|
||||
}
|
||||
|
||||
static async getStdNightlyIndex() {
|
||||
// Convert default map searchIndex to Object since rust 1.76.0
|
||||
return await storage.getItem('index-std-nightly') || Object.fromEntries(searchIndex);
|
||||
let index = await storage.getItem('index-std-nightly');
|
||||
if (index?.length > 0) {
|
||||
return new Map(index);
|
||||
} else {
|
||||
// A structured clone of the search index is required here
|
||||
return structuredClone(searchIndex);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static async getDescShards(crate) {
|
||||
let descShards = await storage.getItem(`desc-shards-${crate}`);
|
||||
if (descShards) {
|
||||
return new Map(descShards);
|
||||
} else {
|
||||
return stdDescShards;
|
||||
}
|
||||
}
|
||||
|
||||
static async getBookIndex() {
|
||||
|
|
|
@@ -9,6 +9,12 @@ export default class IndexSetter {
|
|||
storage.setItem('index-std-nightly', index);
|
||||
}
|
||||
|
||||
static setDescShards(crate, shards) {
|
||||
if (shards) {
|
||||
storage.setItem(`desc-shards-${crate}`, shards);
|
||||
}
|
||||
}
|
||||
|
||||
static setBookIndex(index) {
|
||||
storage.setItem('index-book', index);
|
||||
}
|
||||
|
|
2
extension/index/desc-shards/std.js
Normal file
File diff suppressed because one or more lines are too long
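The generated shard file is suppressed above; presumably (judging from convertToIndexJS() in extension/search/docs/desc-shard.js later in this diff) it default-exports a Map parsed from a JSON array of [crate, {shard: descriptions}] pairs, roughly:

    // Hypothetical shape of extension/index/desc-shards/std.js; the real file is
    // generated from the std docs and is far larger.
    export default new Map(JSON.parse('[["std",{"0":["A UTF-8-encoded, growable string.","..."]}]]'));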
|
@@ -21,10 +21,6 @@ export default class RustSearchOmnibox {
|
|||
lintSearcher,
|
||||
commandManager,
|
||||
}) {
|
||||
// All dynamic setting items. Those items will be updated
|
||||
// in the chrome.storage.onChanged listener callback.
|
||||
let isOfflineMode = await settings.isOfflineMode;
|
||||
|
||||
function formatDoc(index, doc) {
|
||||
let content = doc.href;
|
||||
let description = doc.displayPath + `<match>${doc.name}</match>`;
|
||||
|
@@ -32,12 +28,6 @@
|
|||
description += ` - <dim>${Compat.escape(Compat.eliminateTags(doc.desc))}</dim>`;
|
||||
}
|
||||
|
||||
if (doc.queryType === "s" || doc.queryType === "src") {
|
||||
let url = new URL(doc.href);
|
||||
url.search = "?mode=src";
|
||||
content = url.toString();
|
||||
description = `[Source code] ${description}`;
|
||||
}
|
||||
return { content, description };
|
||||
}
|
||||
|
||||
|
@@ -51,21 +41,18 @@
|
|||
];
|
||||
}
|
||||
|
||||
const docsSearchMixins = {
|
||||
onSearch: (query) => {
|
||||
return stdSearcher.search(query);
|
||||
omnibox.bootstrap({
|
||||
onSearch: async (query) => {
|
||||
const result = await stdSearcher.search(query);
|
||||
return result.others || [];
|
||||
},
|
||||
onFormat: formatDoc,
|
||||
onAppend: async (query) => {
|
||||
return [{
|
||||
content: stdSearcher.getSearchUrl(query),
|
||||
description: `Search Rust docs <match>${query}</match> on ${await settings.isOfflineMode ? "offline mode" : stdSearcher.getRootPath()}`,
|
||||
content: await stdSearcher.getSearchUrl(query),
|
||||
description: `Search Rust docs <match>${query}</match> on ${await settings.isOfflineMode ? "offline mode" : await stdSearcher.rootPath}`,
|
||||
}];
|
||||
},
|
||||
};
|
||||
|
||||
omnibox.bootstrap({
|
||||
...docsSearchMixins,
|
||||
onEmptyNavigate: (content, disposition) => {
|
||||
commandManager.handleCommandEnterEvent(content, disposition);
|
||||
},
|
||||
|
@@ -97,25 +84,44 @@
|
|||
|
||||
omnibox.addRegexQueryEvent(/^s(?:rc)?:/i, {
|
||||
name: "Source code",
|
||||
...docsSearchMixins,
|
||||
onSearch: async (query) => {
|
||||
query = query.replace(/^s(?:rc)?:/i, "");
|
||||
const result = await stdSearcher.search(query);
|
||||
return result.others || [];
|
||||
},
|
||||
onFormat: (index, doc) => {
|
||||
let { content, description } = formatDoc(index, doc);
|
||||
let url = new URL(doc.href);
|
||||
url.search = "?mode=src";
|
||||
content = url.toString();
|
||||
description = `[Source code] ${description}`;
|
||||
return { content, description };
|
||||
},
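// For example (illustrative), an `src:` query rewrites
//   https://doc.rust-lang.org/std/fs/struct.File.html
// into
//   https://doc.rust-lang.org/std/fs/struct.File.html?mode=src
// and prefixes the description with "[Source code]".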
|
||||
onAppend: async (query) => {
|
||||
return [{
|
||||
content: await stdSearcher.getSearchUrl(query),
|
||||
description: `Search Rust docs <match>${query}</match> on ${await settings.isOfflineMode ? "offline mode" : await stdSearcher.rootPath}`,
|
||||
}];
|
||||
},
|
||||
});
|
||||
|
||||
// Nightly std docs search
|
||||
omnibox.addPrefixQueryEvent("/", {
|
||||
name: "Nightly docs",
|
||||
onSearch: (query) => {
|
||||
onSearch: async (query) => {
|
||||
query = query.replaceAll("/", "").trim();
|
||||
return nightlySearcher.search(query);
|
||||
const result = await nightlySearcher.search(query);
|
||||
return result.others || [];
|
||||
},
|
||||
onFormat: (index, doc) => {
|
||||
let { content, description } = formatDoc(index, doc);
|
||||
return { content, description: '[Nightly] ' + description };
|
||||
},
|
||||
onAppend: (query) => {
|
||||
onAppend: async (query) => {
|
||||
query = query.replaceAll("/", "").trim();
|
||||
return [{
|
||||
content: nightlySearcher.getSearchUrl(query),
|
||||
description: `Search nightly Rust docs <match>${query}</match> on ${nightlySearcher.getRootPath()}`,
|
||||
content: await nightlySearcher.getSearchUrl(query),
|
||||
description: `Search nightly Rust docs <match>${query}</match> on ${nightlySearcher.rootPath}`,
|
||||
}];
|
||||
},
|
||||
});
|
||||
|
|
|
@@ -82,13 +82,8 @@ async function start(omnibox) {
|
|||
}),
|
||||
);
|
||||
|
||||
let stdSearcher = new DocSearch("std", await IndexManager.getStdStableIndex(), () => {
|
||||
return isOfflineMode ? offlineDocPath : "https://doc.rust-lang.org/";
|
||||
});
|
||||
let nightlySearcher = new DocSearch("std", await IndexManager.getStdNightlyIndex(), () => {
|
||||
// Nightly docs doesn't support offline mode yet.
|
||||
return "https://doc.rust-lang.org/nightly/";
|
||||
});
|
||||
let nightlySearcher = new DocSearch("std", await IndexManager.getStdNightlyIndex(), "https://doc.rust-lang.org/nightly/");
|
||||
let stdSearcher = new DocSearch("std", await IndexManager.getStdStableIndex(), isOfflineMode ? offlineDocPath : "https://doc.rust-lang.org/");
|
||||
|
||||
RustSearchOmnibox.run({
|
||||
omnibox,
|
||||
|
|
|
@@ -1,64 +1,96 @@
|
|||
(function () {
|
||||
function sendSearchIndex() {
|
||||
if (location.hostname === "docs.rs") { // docs.rs pages
|
||||
// Parse crate info from location pathname.
|
||||
let [crateName, crateVersion, libName] = location.pathname.slice(1).split("/");
|
||||
// Since this PR (https://github.com/rust-lang/docs.rs/pull/1527) merged,
|
||||
// the latest version path has changed:
|
||||
// from https://docs.rs/tokio/1.14.0/tokio/ to https://docs.rs/tokio/latest/tokio/
|
||||
//
|
||||
// If the crate version parsed from the URL is 'latest',
|
||||
// we should reparse it from the DOM to get the correct value.
|
||||
if (crateVersion === 'latest') {
|
||||
crateVersion = parseCrateVersionFromDOM();
|
||||
(async function () {
|
||||
async function loadDesc(descShard) {
|
||||
if (descShard.promise === null) {
|
||||
descShard.promise = new Promise((resolve, reject) => {
|
||||
descShard.resolve = resolve;
|
||||
const ds = descShard;
|
||||
const fname = `${ds.crate}-desc-${ds.shard}-`;
|
||||
const url = resourcePath(`search.desc/${descShard.crate}/${fname}`, ".js",);
|
||||
loadScript({ url, errorCallback: reject })
|
||||
}
|
||||
|
||||
let searchIndex = getSearchIndex();
|
||||
|
||||
// `itemTypes` was reordered in rust-lang/rust@28f17d97a,
|
||||
// which first shipped in rustc 1.76.0-nightly (1e9dda77b 2023-11-22),
|
||||
// preceded by rustc 1.76.0-nightly (2f8d81f9d 2023-11-21).
|
||||
//
|
||||
// Mark each index item as using old `itemTypes` if no rustdoc version
|
||||
// is available or if the version date is less than 2023-11-22.
|
||||
let date = getRustdocVersionDate();
|
||||
if (!date || date < "2023-11-22") {
|
||||
for (let indexItem of Object.values(searchIndex || {})) {
|
||||
indexItem.oldItemTypes = true;
|
||||
}
|
||||
}
|
||||
|
||||
window.postMessage({
|
||||
direction: "rust-search-extension:docs.rs",
|
||||
message: {
|
||||
libName,
|
||||
crateName,
|
||||
crateVersion,
|
||||
searchIndex,
|
||||
},
|
||||
}, "*");
|
||||
} else { // stable/nightly pages
|
||||
const STD_CRATES = ['std', 'test', 'proc_macro'];
|
||||
|
||||
// Remove unnecessary std crates' search indexes, such as core, alloc, etc.
|
||||
let rawSearchIndex = getSearchIndex();
|
||||
let searchIndex = Object.create(null);
|
||||
STD_CRATES.forEach(crate => {
|
||||
searchIndex[crate] = rawSearchIndex[crate];
|
||||
});
|
||||
window.postMessage({
|
||||
direction: `rust-search-extension:std`,
|
||||
message: {
|
||||
searchIndex,
|
||||
},
|
||||
}, "*");
|
||||
)
|
||||
}
|
||||
console.log("Send search index success.");
|
||||
const list = await descShard.promise;
|
||||
return list;
|
||||
}
|
||||
async function loadDescShard(...crates) {
|
||||
if (!window.searchState.descShards) return null;
|
||||
|
||||
// Use [[crateName, shards]] array to construct a map.
|
||||
let crateDescsShard = [];
|
||||
for (let crate of crates) {
|
||||
let shards = {};
|
||||
for (let descShard of window.searchState.descShards.get(crate) || []) {
|
||||
shards[descShard.shard] = await loadDesc(descShard);
|
||||
}
|
||||
|
||||
crateDescsShard.push([crate, shards]);
|
||||
}
|
||||
|
||||
console.log('load desc shard:', crateDescsShard);
|
||||
return crateDescsShard;
|
||||
}
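// Illustrative shape of the payload assembled below; only the field names and the
// [[crate, {shard: descriptions}]] layout come from this file, the values are made up:
// window.postMessage({
//     direction: "rust-search-extension:docs.rs",
//     message: {
//         libName: "async_std",
//         crateName: "async-std",
//         crateVersion: "1.12.0",
//         crateTitle: "Async version of the Rust standard library",
//         searchIndex: [["async_std", { /* per-crate index item */ }]],
//         descShards: [["async_std", { 0: ["desc of item 0", "desc of item 1"] }]],
//     },
// }, "*");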
|
||||
async function sendSearchIndex() {
|
||||
// The original searchIndex loaded from search-index.js
|
||||
const originalSearchIndex = structuredClone(window.searchIndex);
|
||||
loadScript({
|
||||
url: getVar("static-root-path") + getVar("search-js"),
|
||||
loadCallback: async () => {
|
||||
// // After the search.js loaded, init the search
|
||||
// window.initSearch(window.searchIndex);
|
||||
|
||||
if (location.hostname === "docs.rs") { // docs.rs pages
|
||||
// Parse crate info from location pathname.
|
||||
let [crateName, crateVersion, libName] = location.pathname.slice(1).split("/");
|
||||
// Since this PR (https://github.com/rust-lang/docs.rs/pull/1527) merged,
|
||||
// the latest version path has changed:
|
||||
// from https://docs.rs/tokio/1.14.0/tokio/ to https://docs.rs/tokio/latest/tokio/
|
||||
//
|
||||
// If the crate version parsed from the URL is 'latest',
|
||||
// we should reparse it from the DOM to get the correct value.
|
||||
if (crateVersion === 'latest') {
|
||||
crateVersion = parseCrateVersionFromDOM();
|
||||
}
|
||||
|
||||
// [rustdoc] Use Map instead of Object for source files and search index #118910
|
||||
// https://github.com/rust-lang/rust/pull/118910;
|
||||
window.postMessage({
|
||||
direction: "rust-search-extension:docs.rs",
|
||||
message: {
|
||||
libName,
|
||||
crateName,
|
||||
crateVersion,
|
||||
crateTitle: parseCrateTitleFromDOM(),
|
||||
searchIndex: Array.from(originalSearchIndex),
|
||||
descShards: await loadDescShard(libName),
|
||||
},
|
||||
}, "*");
|
||||
} else { // stable/nightly pages
|
||||
const STD_CRATES = ['std', 'test', 'proc_macro'];
|
||||
|
||||
// Remove unnecessary std crates' search indexes, such as core, alloc, etc.
|
||||
let searchIndex = new Map();
|
||||
STD_CRATES.forEach(crate => {
|
||||
searchIndex.set(crate, originalSearchIndex.get(crate));
|
||||
});
|
||||
window.postMessage({
|
||||
direction: `rust-search-extension:std`,
|
||||
message: {
|
||||
searchIndex: Array.from(searchIndex),
|
||||
descShards: await loadDescShard(...STD_CRATES),
|
||||
},
|
||||
}, "*");
|
||||
}
|
||||
console.log("Send search index success.");
|
||||
// Disable librustdoc search.js onpageshow event
|
||||
window.onpageshow = function () { };
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Before Rust 1.52.0, we could get the search index from window directly.
|
||||
if (window.searchIndex) {
|
||||
sendSearchIndex();
|
||||
await sendSearchIndex();
|
||||
} else {
|
||||
// Due to the new on-demand search-index.js load mode introduced in PR #82310,
|
||||
// we need to trigger a manual search-index.js load here.
|
||||
|
@@ -76,31 +108,16 @@
|
|||
searchIndexJs = resourcePath("search-index", ".js") || getVar('search-index-js') || getVar('search-js');
|
||||
}
|
||||
|
||||
|
||||
if (searchIndexJs) {
|
||||
let script = document.createElement('script');
|
||||
script.src = searchIndexJs;
|
||||
script.onload = sendSearchIndex;
|
||||
document.head.append(script);
|
||||
// Load search-index.js first, clone the search index for backup.
|
||||
// because after initSearch() is called, the search index will be modified.
|
||||
loadScript({ url: searchIndexJs, loadCallback: sendSearchIndex });
|
||||
} else {
|
||||
console.error("Sorry, no search index found.");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// [rustdoc] Use Map instead of Object for source files and search index #118910
|
||||
// https://github.com/rust-lang/rust/pull/118910
|
||||
function getSearchIndex() {
|
||||
if (window.searchIndex instanceof Map || Object.prototype.toString.call(window.searchIndex) === '[object Map]') {
|
||||
return Object.fromEntries(window.searchIndex);
|
||||
} else {
|
||||
return window.searchIndex;
|
||||
}
|
||||
}
|
||||
|
||||
function getRustdocVersionDate() {
|
||||
return getVar("rustdoc-version")?.match(/\d{4}-\d{2}-\d{2}/)?.[0];
|
||||
}
|
||||
|
||||
// ======== Following function mirrored to librustdoc main.js ========
|
||||
|
||||
// Get rustdoc variable from DOM.
|
||||
|
|
|
@@ -30,10 +30,13 @@ window.addEventListener("message", function (event) {
|
|||
event.data &&
|
||||
event.data.direction === "rust-search-extension:std") {
|
||||
let searchIndex = event.data.message.searchIndex;
|
||||
let descShards = event.data.message.descShards;
|
||||
if (TARGET === 'stable') {
|
||||
rse.IndexSetter.setStdStableIndex(searchIndex);
|
||||
rse.IndexSetter.setDescShards("std-stable", descShards);
|
||||
} else {
|
||||
rse.IndexSetter.setStdNightlyIndex(searchIndex);
|
||||
rse.IndexSetter.setDescShards("std-nightly", descShards);
|
||||
}
|
||||
let now = new Date();
|
||||
let version = `${now.getFullYear()}-${now.getMonth() + 1}-${now.getDate()}`;
|
||||
|
|
17
extension/script/docs-rs-check-crate.js
Normal file
|
@@ -0,0 +1,17 @@
|
|||
(function () {
|
||||
if (!window.searchState?.descShards) {
|
||||
let button = document.querySelector(".add-to-extension");
|
||||
if (button) {
|
||||
button.classList.add("add-to-extension-disabled");
|
||||
button.onclick = (event) => {
|
||||
// Prevent click event propagation to parent element.
|
||||
event.stopImmediatePropagation();
|
||||
};
|
||||
}
|
||||
let content = document.querySelector(".add-to-extension-content");
|
||||
if (content) {
|
||||
content.innerHTML = `<p>This crate version is not supported by Rust Search Extension. <br><br>Since Rust Search Extension v2.0, we only support crates published after 2024-04-20.</p>`;
|
||||
}
|
||||
console.log("No searchState.descShards found, cannot add this crate to Rust Search Extension.");
|
||||
}
|
||||
})();
|
|
@@ -98,6 +98,12 @@ div.nav-container form.landing-search-form-nav {
|
|||
margin: 5.4px 16px 4.4px;
|
||||
}
|
||||
|
||||
.add-to-extension-disabled {
|
||||
background-color: #edebe7 !important;
|
||||
cursor: not-allowed !important;
|
||||
color: #666 !important;
|
||||
}
|
||||
|
||||
.add-to-extension:hover {
|
||||
color: #121212;
|
||||
background-color: #F9BB2D;
|
||||
|
|
|
@@ -106,6 +106,7 @@ document.addEventListener("DOMContentLoaded", async () => {
|
|||
} else {
|
||||
insertAddToExtensionElement("need-to-install");
|
||||
}
|
||||
injectScripts(["script/docs-rs-check-crate.js"]);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@@ -22,6 +22,15 @@ function parseCrateVersionFromDOM() {
|
|||
}
|
||||
}
|
||||
|
||||
function parseCrateTitleFromDOM() {
|
||||
let el = document.querySelector('form a.crate-name');
|
||||
if (el) {
|
||||
return el.getAttribute("title").substring(0, 100);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function parseCargoFeatures(content) {
|
||||
if (!content.version?.features) {
|
||||
return [];
|
||||
|
@@ -57,16 +66,20 @@ function parseOptionalDependencies(content) {
|
|||
return dependencies;
|
||||
}
|
||||
|
||||
function loadScript({ url, loadCallback, errorCallback }) {
|
||||
const script = document.createElement("script");
|
||||
script.src = url;
|
||||
if (loadCallback !== undefined) {
|
||||
script.onload = loadCallback
|
||||
}
|
||||
if (errorCallback !== undefined) {
|
||||
script.onerror = errorCallback
|
||||
}
|
||||
document.head.append(script)
|
||||
}
|
||||
|
||||
function injectScripts(paths) {
|
||||
paths.map(path => {
|
||||
let script = document.createElement("script");
|
||||
script.src = chrome.runtime.getURL(path);
|
||||
script.onload = () => {
|
||||
// Remove self after loaded
|
||||
script.remove();
|
||||
};
|
||||
return script;
|
||||
}).forEach(script => {
|
||||
document.body.insertAdjacentElement('beforeBegin', script);
|
||||
paths.forEach(path => {
|
||||
loadScript({ url: chrome.runtime.getURL(path) });
|
||||
});
|
||||
}
|
||||
|
|
3310
extension/search/docs/base-v2.js
Normal file
File diff suppressed because it is too large
|
@@ -1,677 +1,28 @@
|
|||
import { levenshtein } from "../algorithm.js";
|
||||
import DocSearchV2 from "./base-v2.js";
|
||||
|
||||
// This mapping table should match the discriminants of
|
||||
// `rustdoc::formats::item_type::ItemType` type in Rust.
|
||||
const itemTypes = [
|
||||
"keyword",
|
||||
"primitive",
|
||||
"mod",
|
||||
"externcrate",
|
||||
"import",
|
||||
"struct", // 5
|
||||
"enum",
|
||||
"fn",
|
||||
"type",
|
||||
"static",
|
||||
"trait", // 10
|
||||
"impl",
|
||||
"tymethod",
|
||||
"method",
|
||||
"structfield",
|
||||
"variant", // 15
|
||||
"macro",
|
||||
"associatedtype",
|
||||
"constant",
|
||||
"associatedconstant",
|
||||
"union", // 20
|
||||
"foreigntype",
|
||||
"existential",
|
||||
"attr",
|
||||
"derive",
|
||||
"traitalias", // 25
|
||||
"generic",
|
||||
];
|
||||
|
||||
// `itemTypes` was reordered in rust-lang/rust@28f17d97a,
|
||||
// we should upgrade the item type when the rustdoc version is
|
||||
// earlier than that commit, which is when `indexItem.oldItemTypes`
|
||||
// is set to `true` in script/add-search-index.js.
|
||||
//
|
||||
// Calculated by `oldItemTypes.map(ty => newItemTypes.indexOf(ty))`.
|
||||
const upgradeItemType = [
|
||||
2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
|
||||
12, 13, 14, 15, 16, 1, 17, 18, 19, 20,
|
||||
21, 0, 22, 23, 24, 25, 26
|
||||
];
|
||||
|
||||
// Max levenshtein distance.
|
||||
const MAX_LEV_DISTANCE = 2;
|
||||
|
||||
export default class DocSearch {
|
||||
|
||||
/**
|
||||
* Construct the DocSearch.
|
||||
* @param name the crate name
|
||||
* @param searchIndex the crate search index
|
||||
* @param rootPathCallback the root path callback to help dynamically get root path
|
||||
*/
|
||||
constructor(name, searchIndex, rootPathCallback) {
|
||||
export default class DocSearch extends DocSearchV2 {
|
||||
constructor(name, searchIndex, rootPath) {
|
||||
if (!(searchIndex instanceof Map)) {
|
||||
searchIndex = new Map(Object.entries(searchIndex));
|
||||
}
|
||||
super(searchIndex, rootPath);
|
||||
this.name = name;
|
||||
// The list of search words to query against.
|
||||
this.searchWords = [];
|
||||
this.searchIndex = this.buildIndex(searchIndex);
|
||||
this.getRootPath = rootPathCallback;
|
||||
|
||||
// Current query lowercase keyword.
|
||||
this.valLower = null;
|
||||
this.split = null;
|
||||
}
|
||||
|
||||
setSearchIndex(searchIndex) {
|
||||
this.searchIndex = this.buildIndex(searchIndex);
|
||||
}
|
||||
|
||||
getSearchUrl(keyword) {
|
||||
let url = `${this.getRootPath()}${this.name}/index.html`;
|
||||
async getSearchUrl(keyword) {
|
||||
let url = `${await this.rootPath}${this.name}/index.html`;
|
||||
if (keyword) {
|
||||
url += `?search=${encodeURIComponent(keyword)}`;
|
||||
}
|
||||
return url;
|
||||
}
|
||||
|
||||
search(query) {
|
||||
async search(query) {
|
||||
if (!query) return [];
|
||||
return this.execQuery(this.buildQuery(query));
|
||||
return await this.execQuery(DocSearchV2.parseQuery(query), null, this.name);
|
||||
}
|
||||
|
||||
buildIndex(rawSearchIndex) {
|
||||
let searchIndex = [];
|
||||
const searchWords = [];
|
||||
const charA = "A".charCodeAt(0);
|
||||
for (let [crateName, indexItem] of Object.entries(rawSearchIndex || {})) {
|
||||
searchWords.push(crateName);
|
||||
searchIndex.push({
|
||||
crate: crateName,
|
||||
ty: 3, // == ExternCrate
|
||||
name: crateName,
|
||||
path: "",
|
||||
desc: indexItem.doc,
|
||||
type: null,
|
||||
});
|
||||
|
||||
// https://github.com/rust-lang/rust/pull/83003
|
||||
// librustdoc has switched the search-index.js from an "array of structs" to a "struct of arrays" format.
|
||||
// We need to compat both the new and old formats.
|
||||
if (["t", "n", "q", "d", "i", "f", "p"].every(key => key in indexItem)) {
|
||||
// an array of (Number) item types (before 1.69.0)
|
||||
// However, it changed since this PR: https://github.com/rust-lang/rust/pull/108013
|
||||
// a String of one character item type codes (since 1.69.0)
|
||||
const itemTypes = indexItem.t;
|
||||
// an array of (String) item names
|
||||
const itemNames = indexItem.n;
|
||||
// an array of (String) full paths (or empty string for previous path)
|
||||
let itemPaths = indexItem.q;
|
||||
if (itemPaths.length > 0 && Array.isArray(itemPaths[0])) {
|
||||
// an array of [(Number) item index,
|
||||
// (String) full path]
|
||||
// an item whose index is not present will fall back to the previous present path
|
||||
// i.e. if indices 4 and 11 are present, but 5-10 and 12-13 are not present,
|
||||
// 5-10 will fall back to the path for 4 and 12-13 will fall back to the path for 11
|
||||
//
|
||||
// Since Rust 1.70 this is a Map instead of an array.
|
||||
// See https://github.com/rust-lang/rust/pull/107629
|
||||
itemPaths = new Map(indexItem.q);
|
||||
}
|
||||
// an array of (String) descriptions
|
||||
const itemDescs = indexItem.d;
|
||||
// an array of (Number) the parent path index + 1 to `paths`, or 0 if none
|
||||
const itemParentIdxs = indexItem.i;
|
||||
// an array of (Object | null) the type of the function, if any
|
||||
const itemFunctionSearchTypes = indexItem.f;
|
||||
// an array of [(Number) item type,
|
||||
// (String) name]
|
||||
let paths = indexItem.p;
|
||||
|
||||
// convert `paths` into an object form
|
||||
for (let i = 0; i < paths.length; ++i) {
|
||||
if (Array.isArray(paths[i])) {
|
||||
let ty = paths[i][0];
|
||||
// See the comments on `upgradeItemType`.
|
||||
if (indexItem.oldItemTypes) {
|
||||
ty = upgradeItemType[ty];
|
||||
}
|
||||
|
||||
paths[i] = { ty, name: paths[i][1] };
|
||||
}
|
||||
}
|
||||
|
||||
// convert `item*` into an object form, and construct word indices.
|
||||
//
|
||||
// before any analysis is performed lets gather the search terms to
|
||||
// search against apart from the rest of the data. This is a quick
|
||||
// operation that is cached for the life of the page state so that
|
||||
// all other search operations have access to this cached data for
|
||||
// faster analysis operations
|
||||
let len = itemTypes.length;
|
||||
let lastPath = "";
|
||||
for (let i = 0; i < len; ++i) {
|
||||
let rawTy = itemTypes[i];
|
||||
// `itemTypes` changed from number array to string since Rust 1.69,
|
||||
// we should compat both versions.
|
||||
// see this PR: https://github.com/rust-lang/rust/pull/108013
|
||||
let ty = typeof rawTy === 'string' ? rawTy.charCodeAt(0) - charA : rawTy;
|
||||
|
||||
// See the comments on `upgradeItemType`.
|
||||
if (indexItem.oldItemTypes) {
|
||||
ty = upgradeItemType[ty];
|
||||
}
|
||||
|
||||
let path = lastPath;
|
||||
// check if itemPaths is map
|
||||
// Since Rust 1.70, the itemPaths has changed from array to map.
|
||||
// See https://github.com/rust-lang/rust/pull/107629
|
||||
if (itemPaths instanceof Map) {
|
||||
path = itemPaths.get(i) || lastPath;
|
||||
} else {
|
||||
path = itemPaths[i] || lastPath;
|
||||
}
|
||||
let row = {
|
||||
crate: crateName,
|
||||
ty,
|
||||
name: itemNames[i],
|
||||
path,
|
||||
desc: itemDescs[i],
|
||||
parent: itemParentIdxs[i] > 0 ? paths[itemParentIdxs[i] - 1] : undefined,
|
||||
type: itemFunctionSearchTypes[i],
|
||||
};
|
||||
searchIndex.push(row);
|
||||
if (typeof row.name === "string") {
|
||||
let word = row.name.toLowerCase();
|
||||
searchWords.push(word);
|
||||
} else {
|
||||
searchWords.push("");
|
||||
}
|
||||
lastPath = row.path;
|
||||
}
|
||||
} else {
|
||||
// an array of [(Number) item type,
|
||||
// (String) name,
|
||||
// (String) full path or empty string for previous path,
|
||||
// (String) description,
|
||||
// (Number | null) the parent path index to `paths`]
|
||||
// (Object | null) the type of the function (if any)
|
||||
// Compat old style (items, paths) and new style (i, p)
|
||||
const items = indexItem.items || indexItem.i;
|
||||
|
||||
// an array of [(Number) item type,
|
||||
// (String) name]
|
||||
let paths = indexItem.paths || indexItem.p;
|
||||
|
||||
// convert `paths` into an object form
|
||||
for (let i = 0; i < paths.length; ++i) {
|
||||
if (Array.isArray(paths[i])) {
|
||||
let ty = paths[i][0];
|
||||
// See the comments on `upgradeItemType`.
|
||||
if (indexItem.oldItemTypes) {
|
||||
ty = upgradeItemType[ty];
|
||||
}
|
||||
|
||||
paths[i] = { ty, name: paths[i][1] };
|
||||
}
|
||||
}
|
||||
|
||||
// convert `items` into an object form, and construct word indices.
|
||||
//
|
||||
// before any analysis is performed lets gather the search terms to
|
||||
// search against apart from the rest of the data. This is a quick
|
||||
// operation that is cached for the life of the page state so that
|
||||
// all other search operations have access to this cached data for
|
||||
// faster analysis operations
|
||||
let len = items.length;
|
||||
let lastPath = "";
|
||||
for (let i = 0; i < len; ++i) {
|
||||
const rawRow = items[i];
|
||||
|
||||
let ty = rawRow[0];
|
||||
// See the comments on `upgradeItemType`.
|
||||
if (indexItem.oldItemTypes) {
|
||||
ty = upgradeItemType[ty];
|
||||
}
|
||||
|
||||
let row = {
|
||||
crate: crateName,
|
||||
ty,
|
||||
name: rawRow[1],
|
||||
path: rawRow[2] || lastPath,
|
||||
desc: rawRow[3],
|
||||
parent: paths[rawRow[4]],
|
||||
type: rawRow[5]
|
||||
};
|
||||
searchIndex.push(row);
|
||||
if (typeof row.name === "string") {
|
||||
let word = row.name.toLowerCase();
|
||||
searchWords.push(word);
|
||||
} else {
|
||||
searchWords.push("");
|
||||
}
|
||||
lastPath = row.path;
|
||||
}
|
||||
}
|
||||
}
|
||||
this.searchWords = searchWords;
|
||||
return searchIndex;
|
||||
}
|
||||
|
||||
buildQuery(raw) {
|
||||
let matches, type, query;
|
||||
query = raw;
|
||||
|
||||
// let query = "fn:unwrap";
|
||||
// then the matches is ["fn:", "fn", index: 0, input: "fn:unwrap", groups: undefined]
|
||||
matches = query.match(/^(fn|mod|struct|enum|trait|type|const|macro|s|src)\s*:\s*/i);
|
||||
if (matches) {
|
||||
type = matches[1].replace(/^const$/, 'constant');
|
||||
query = query.substring(matches[0].length);
|
||||
}
|
||||
|
||||
return {
|
||||
raw: raw,
|
||||
query: query,
|
||||
type: type,
|
||||
id: query + type
|
||||
};
|
||||
}
|
||||
|
||||
execQuery(query) {
|
||||
function itemTypeFromName(typename) {
|
||||
for (let i = 0; i < itemTypes.length; ++i) {
|
||||
if (itemTypes[i] === typename) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
function checkPath(contains, lastElem, ty) {
|
||||
if (contains.length === 0) {
|
||||
return 0;
|
||||
}
|
||||
let ret_lev = MAX_LEV_DISTANCE + 1;
|
||||
const path = ty.path.split("::");
|
||||
|
||||
if (ty.parent && ty.parent.name) {
|
||||
path.push(ty.parent.name.toLowerCase());
|
||||
}
|
||||
|
||||
if (contains.length > path.length) {
|
||||
return MAX_LEV_DISTANCE + 1;
|
||||
}
|
||||
for (let i = 0; i < path.length; ++i) {
|
||||
if (i + contains.length > path.length) {
|
||||
break;
|
||||
}
|
||||
let lev_total = 0;
|
||||
let aborted = false;
|
||||
for (let x = 0; x < contains.length; ++x) {
|
||||
const lev = levenshtein(path[i + x], contains[x]);
|
||||
if (lev > MAX_LEV_DISTANCE) {
|
||||
aborted = true;
|
||||
break;
|
||||
}
|
||||
lev_total += lev;
|
||||
}
|
||||
if (aborted === false) {
|
||||
ret_lev = Math.min(ret_lev, Math.round(lev_total / contains.length));
|
||||
}
|
||||
}
|
||||
return ret_lev;
|
||||
}
|
||||
|
||||
function typePassesFilter(filter, type) {
|
||||
// No filter
|
||||
if (filter < 0) return true;
|
||||
|
||||
// Exact match
|
||||
if (filter === type) return true;
|
||||
|
||||
// Match related items
|
||||
const name = itemTypes[type];
|
||||
switch (itemTypes[filter]) {
|
||||
case "constant":
|
||||
return (name === "associatedconstant");
|
||||
case "fn":
|
||||
return (name === "method" || name === "tymethod");
|
||||
case "type":
|
||||
return (name === "primitive" || name === "keyword");
|
||||
}
|
||||
|
||||
// No match
|
||||
return false;
|
||||
}
|
||||
|
||||
function generateId(ty) {
|
||||
if (ty.parent && ty.parent.name) {
|
||||
return itemTypes[ty.ty] + ty.path + ty.parent.name + ty.name;
|
||||
}
|
||||
return itemTypes[ty.ty] + ty.path + ty.name;
|
||||
}
|
||||
|
||||
this.valLower = query.query.toLowerCase();
|
||||
this.split = this.valLower.split("::");
|
||||
|
||||
let val = this.valLower;
|
||||
const typeFilter = itemTypeFromName(query.type),
|
||||
results = Object.create(null)
|
||||
|
||||
for (let z = 0; z < this.split.length; ++z) {
|
||||
if (this.split[z] === "") {
|
||||
this.split.splice(z, 1);
|
||||
z -= 1;
|
||||
}
|
||||
}
|
||||
|
||||
const nSearchWords = this.searchWords.length;
|
||||
query.inputs = [val];
|
||||
query.output = val;
|
||||
query.search = val;
|
||||
// gather matching search results up to a certain maximum
|
||||
// val = val.replace(/\_/g, "");
|
||||
|
||||
// var valGenerics = extractGenerics(val);
|
||||
|
||||
const paths = this.valLower.split("::");
|
||||
let j;
|
||||
// "std::option::".split("::") => ["std", "option", ""]
|
||||
for (j = 0; j < paths.length; ++j) {
|
||||
if (paths[j] === "") {
|
||||
paths.splice(j, 1);
|
||||
j -= 1;
|
||||
}
|
||||
}
|
||||
val = paths[paths.length - 1];
|
||||
let contains = paths.slice(0, paths.length > 1 ? paths.length - 1 : 1);
|
||||
|
||||
for (j = 0; j < nSearchWords; ++j) {
|
||||
let ty = this.searchIndex[j];
|
||||
if (!ty) {
|
||||
continue;
|
||||
}
|
||||
let lev_add = 0;
|
||||
if (paths.length > 1) {
|
||||
let lev = checkPath(contains, paths[paths.length - 1], ty);
|
||||
if (lev > MAX_LEV_DISTANCE) {
|
||||
continue;
|
||||
} else if (lev > 0) {
|
||||
lev_add = 1;
|
||||
}
|
||||
}
|
||||
|
||||
let index = -1;
|
||||
// we want lev results to go lower than others
|
||||
let lev = MAX_LEV_DISTANCE + 1;
|
||||
const fullId = generateId(ty);
|
||||
|
||||
if (this.searchWords[j].indexOf(val) > -1 ||
|
||||
this.searchWords[j].replace(/_/g, "").indexOf(val) > -1) {
|
||||
// filter type: ... queries
|
||||
if (typePassesFilter(typeFilter, ty.ty) && results[fullId] === undefined) {
|
||||
index = this.searchWords[j].replace(/_/g, "").indexOf(val);
|
||||
}
|
||||
}
|
||||
if ((lev = levenshtein(this.searchWords[j], val)) <= MAX_LEV_DISTANCE) {
|
||||
if (typePassesFilter(typeFilter, ty.ty) === false) {
|
||||
lev = MAX_LEV_DISTANCE + 1;
|
||||
} else {
|
||||
lev += 1;
|
||||
}
|
||||
}
|
||||
|
||||
lev += lev_add;
|
||||
if (lev > 0 && val && val.length > 3 && this.searchWords[j].indexOf(val) > -1) {
|
||||
if (val.length < 6) {
|
||||
lev -= 1;
|
||||
} else {
|
||||
lev = 0;
|
||||
}
|
||||
}
|
||||
|
||||
if (index !== -1 || lev <= MAX_LEV_DISTANCE) {
|
||||
if (index !== -1 && paths.length < 2) {
|
||||
lev = 0;
|
||||
}
|
||||
if (results[fullId] === undefined) {
|
||||
results[fullId] = {
|
||||
id: j,
|
||||
index: index,
|
||||
lev: lev,
|
||||
type: query.type,
|
||||
};
|
||||
}
|
||||
results[fullId].lev = Math.min(results[fullId].lev, lev);
|
||||
}
|
||||
}
|
||||
|
||||
return this.sortResults(results);
|
||||
}
|
||||
|
||||
sortResults(results) {
|
||||
const ar = [];
|
||||
for (let entry of Object.values(results)) {
|
||||
ar.push(entry);
|
||||
}
|
||||
results = ar;
|
||||
const nresults = results.length;
|
||||
for (let i = 0; i < nresults; ++i) {
|
||||
results[i].word = this.searchWords[results[i].id];
|
||||
results[i].item = this.searchIndex[results[i].id] || {};
|
||||
}
|
||||
// if there are no results then return to default and fail
|
||||
if (results.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const valLower = this.valLower;
|
||||
results.sort(function (aaa, bbb) {
|
||||
let a, b;
|
||||
|
||||
// Sort by non levenshtein results and then levenshtein results by the distance
|
||||
// (less changes required to match means higher rankings)
|
||||
a = (aaa.lev);
|
||||
b = (bbb.lev);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// sort by exact match (mismatch goes later)
|
||||
a = (aaa.word !== valLower);
|
||||
b = (bbb.word !== valLower);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// sort by item name length (longer goes later)
|
||||
a = aaa.word.length;
|
||||
b = bbb.word.length;
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// sort by item name (lexicographically larger goes later)
|
||||
a = aaa.word;
|
||||
b = bbb.word;
|
||||
if (a !== b) {
|
||||
return (a > b ? +1 : -1);
|
||||
}
|
||||
|
||||
// sort by index of keyword in item name (no literal occurrence goes later)
|
||||
a = (aaa.index < 0);
|
||||
b = (bbb.index < 0);
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
// (later literal occurrence, if any, goes later)
|
||||
a = aaa.index;
|
||||
b = bbb.index;
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// sort by description (no description goes later)
|
||||
a = (aaa.item.desc === '');
|
||||
b = (bbb.item.desc === '');
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// sort by type (later occurrence in `itemTypes` goes later)
|
||||
a = aaa.item.ty;
|
||||
b = bbb.item.ty;
|
||||
if (a !== b) {
|
||||
return a - b;
|
||||
}
|
||||
|
||||
// sort by path (lexicographically larger goes later)
|
||||
a = aaa.item.path;
|
||||
b = bbb.item.path;
|
||||
if (a !== b) {
|
||||
return (a > b ? +1 : -1);
|
||||
}
|
||||
|
||||
// que sera, sera
|
||||
return 0;
|
||||
});
|
||||
|
||||
for (let i = 0; i < results.length; ++i) {
|
||||
const result = results[i];
|
||||
|
||||
// this validation does not make sense when searching by types
|
||||
if (result.dontValidate) {
|
||||
continue;
|
||||
}
|
||||
const name = result.item.name?.toLowerCase(),
|
||||
path = result.item.path?.toLowerCase(),
|
||||
parent = result.item.parent;
|
||||
|
||||
if (this.validateResult(name, path, this.split, parent) === false) {
|
||||
result.id = -1;
|
||||
}
|
||||
}
|
||||
return this.transformResults(results);
|
||||
}
|
||||
|
||||
transformResults(results, isType) {
|
||||
const out = [];
|
||||
for (let i = 0; i < results.length; ++i) {
|
||||
if (results[i].id > -1) {
|
||||
const obj = this.searchIndex[results[i].id];
|
||||
obj.lev = results[i].lev;
|
||||
if (isType !== true || obj.type) {
|
||||
const res = this.buildHrefAndPath(obj);
|
||||
// obj.displayPath = pathSplitter(res[0]);
|
||||
obj.displayPath = res[0];
|
||||
obj.fullPath = obj.displayPath + obj.name;
|
||||
// To be sure that some items aren't considered as duplicates.
|
||||
// obj.fullPath += '|' + obj.ty;
|
||||
obj.href = res[1];
|
||||
// The queryType means the 'fn', 'trait', 'src' search types.
|
||||
// See buildQuery() method.
|
||||
obj.queryType = results[i].type;
|
||||
out.push(obj);
|
||||
}
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
buildHrefAndPath(item) {
|
||||
let rootPath = this.getRootPath();
|
||||
let displayPath;
|
||||
let href;
|
||||
const type = itemTypes[item.ty];
|
||||
const name = item.name;
|
||||
let path = item.path;
|
||||
|
||||
if (type === "mod") {
|
||||
displayPath = path + "::";
|
||||
href = rootPath + path.replace(/::/g, "/") + "/" +
|
||||
name + "/index.html";
|
||||
} else if (type === "import") {
|
||||
displayPath = item.path + "::";
|
||||
href = rootPath + item.path.replace(/::/g, "/") + "/index.html#reexport." + name;
|
||||
} else if (type === "primitive" || type === "keyword") {
|
||||
displayPath = "";
|
||||
href = rootPath + path.replace(/::/g, "/") +
|
||||
"/" + type + "." + name + ".html";
|
||||
} else if (type === "externcrate") {
|
||||
displayPath = "";
|
||||
href = rootPath + name + "/index.html";
|
||||
} else if (item.parent !== undefined) {
|
||||
const myparent = item.parent;
|
||||
let anchor = "#" + type + "." + name;
|
||||
const parentType = itemTypes[myparent.ty];
|
||||
let pageType = parentType;
|
||||
let pageName = myparent.name;
|
||||
|
||||
if (parentType === "primitive") {
|
||||
displayPath = myparent.name + "::";
|
||||
} else if (type === "structfield" && parentType === "variant") {
|
||||
// Structfields belonging to variants are special: the
|
||||
// final path element is the enum name.
|
||||
const splitPath = item.path.split("::");
|
||||
const enumName = splitPath.pop();
|
||||
path = splitPath.join("::");
|
||||
displayPath = path + "::" + enumName + "::" + myparent.name + "::";
|
||||
anchor = "#variant." + myparent.name + ".field." + name;
|
||||
pageType = "enum";
|
||||
pageName = enumName;
|
||||
} else {
|
||||
displayPath = path + "::" + myparent.name + "::";
|
||||
}
|
||||
href = rootPath + path.replace(/::/g, "/") +
|
||||
"/" + pageType +
|
||||
"." + pageName +
|
||||
".html" + anchor;
|
||||
} else {
|
||||
displayPath = item.path + "::";
|
||||
href = rootPath + item.path.replace(/::/g, "/") +
|
||||
"/" + type + "." + name + ".html";
|
||||
}
|
||||
return [displayPath, href];
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Validate performs the following boolean logic. For example:
|
||||
* "File::open" will give IF A PARENT EXISTS => ("file" && "open")
|
||||
* exists in (name || path || parent) OR => ("file" && "open") exists in
|
||||
* (name || path )
|
||||
*
|
||||
* This could be written functionally, but I wanted to minimise
|
||||
* functions on stack.
|
||||
*
|
||||
* @param {[string]} name [The name of the result]
|
||||
* @param {[string]} path [The path of the result]
|
||||
* @param {[string]} keys [The keys to be used (["file", "open"])]
|
||||
* @param {[object]} parent [The parent of the result]
|
||||
* @return {boolean} [Whether the result is valid or not]
|
||||
*/
|
||||
validateResult(name, path, keys, parent) {
|
||||
for (let i = 0; i < keys.length; ++i) {
|
||||
// each check is for validation so we negate the conditions and invalidate
|
||||
if (!(
|
||||
// check for an exact name match
|
||||
name.indexOf(keys[i]) > -1 ||
|
||||
// then an exact path match
|
||||
path.indexOf(keys[i]) > -1 ||
|
||||
// next if there is a parent, check for exact parent match
|
||||
(parent !== undefined &&
|
||||
parent.name !== undefined &&
|
||||
parent.name.toLowerCase().indexOf(keys[i]) > -1) ||
|
||||
// lastly check to see if the name was a levenshtein match
|
||||
levenshtein(name, keys[i]) <= MAX_LEV_DISTANCE)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
};
|
||||
}
|
|
@@ -4,11 +4,8 @@ import settings from "../../settings.js";
|
|||
|
||||
// A DocSearch dedicated to a single crate based on the search-index.
|
||||
class SingleCrateDocSearch extends DocSearch {
|
||||
|
||||
constructor(name, version, searchIndex) {
|
||||
super(name, searchIndex, () => {
|
||||
return `https://docs.rs/${name}/${this.version}/`;
|
||||
});
|
||||
super(name, searchIndex, `https://docs.rs/${name}/${version}/`);
|
||||
this.version = version;
|
||||
}
|
||||
}
|
||||
|
@@ -21,9 +18,13 @@ export default class CrateDocSearch {
|
|||
}
|
||||
|
||||
async initAllCrateSearcher() {
|
||||
let searchIndex = Object.create(null)
|
||||
let searchIndex = new Map();
|
||||
for (const libName of Object.keys(await CrateDocManager.getCrates())) {
|
||||
searchIndex = Object.assign(searchIndex, await CrateDocManager.getCrateSearchIndex(libName));
|
||||
let crateSearchIndex = await CrateDocManager.getCrateSearchIndex(libName);
|
||||
if (crateSearchIndex) {
|
||||
// merge each crate's search index into a single map
|
||||
searchIndex = new Map([...searchIndex, ...crateSearchIndex]);
|
||||
}
|
||||
}
|
||||
this.allCrateSearcher = new SingleCrateDocSearch("~", "*", searchIndex);
|
||||
}
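// Note (illustrative): spreading Maps like this merges their entries, with later
// entries winning on key collision, e.g.
//   new Map([...new Map([["a", 1]]), ...new Map([["a", 2], ["b", 3]])]) // Map { "a" => 2, "b" => 3 }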
|
||||
|
@@ -69,10 +70,11 @@ export default class CrateDocSearch {
|
|||
}
|
||||
}
|
||||
|
||||
let results = searcher.search(keyword);
|
||||
let results = await searcher.search(keyword);
|
||||
results = results.others || [];
|
||||
// Push result footer.
|
||||
results.push({
|
||||
content: searcher.getSearchUrl(keyword),
|
||||
content: await searcher.getSearchUrl(keyword),
|
||||
description: `Search ${keyword ? `<match>${keyword}</match>` : 'keyword'} on <dim>${`https://docs.rs/${crateName}`}</dim> directly`,
|
||||
});
|
||||
return results;
|
||||
|
@@ -84,7 +86,7 @@ export default class CrateDocSearch {
|
|||
await this.initAllCrateSearcher();
|
||||
}
|
||||
let keyword = query.replace("~", "").trim();
|
||||
return this.allCrateSearcher.search(keyword);
|
||||
return await this.allCrateSearcher.search(keyword);
|
||||
}
|
||||
|
||||
// Invalidate cached search. This is needed if we update crate's search index.
|
||||
|
|
47
extension/search/docs/desc-shard.js
Normal file
|
@@ -0,0 +1,47 @@
|
|||
import CrateDocManager from "../../crate-manager.js";
|
||||
import IndexManager from "../../index-manager.js";
|
||||
|
||||
export function convertToIndexJS(shards) {
|
||||
let array = new Array();
|
||||
for (let [crate, shard] of Object.entries(shards)) {
|
||||
array.push([crate, shard]);
|
||||
}
|
||||
return `new Map(JSON.parse('${JSON.stringify(array)}'));`;
|
||||
}
|
||||
|
||||
class DescShardManager {
|
||||
constructor() {
|
||||
// A dummy descShards map to allow interaction with librustdoc's DocSearch js
|
||||
this.descShards = new DummyMap();
|
||||
// The real crate -> desc shard map.
|
||||
this._descShards = new Map();
|
||||
this.initDescShards();
|
||||
}
|
||||
|
||||
async initDescShards() {
|
||||
this._descShards = await IndexManager.getDescShards('std-stable');
|
||||
for (const crate of Object.keys(await CrateDocManager.getCrates())) {
|
||||
const descShards = await IndexManager.getDescShards(crate);
|
||||
this._descShards = new Map([...this._descShards, ...descShards]);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Load a single desc shard.
|
||||
// Compatible with librustdoc main.js.
|
||||
async loadDesc({ descShard, descIndex }) {
|
||||
let crateDescShard = this._descShards.get(descShard.crate);
|
||||
if (!crateDescShard || crateDescShard.length === 0) {
|
||||
return null;
|
||||
}
|
||||
return crateDescShard[descShard.shard][descIndex];
|
||||
}
|
||||
}
|
||||
|
||||
class DummyMap {
|
||||
set(_key, _value) { }
|
||||
}
|
||||
|
||||
|
||||
const searchState = new DescShardManager();
|
||||
export default searchState;
|