diff --git a/.env b/.env
index 14e36c5ec..5bbf70d83 100644
--- a/.env
+++ b/.env
@@ -1,2 +1,5 @@
-VITE_VERSION_LATEST="v11.0.0"
-VITE_VERSION_NEXT="v12.0.0"
\ No newline at end of file
+VITE_VERSION_LATEST=12.0.0
+VITE_VERSION_NEXT=13.0.0
+VITE_ALGOLIA_READ_API_KEY=667630d6ab41eff82df15fdc6a55153f
+VITE_ALGOLIA_APP_ID=1T1PRULLJT
+VITE_ALGOLIA_INDEX_NAME=dev_2026
diff --git a/__tests__/SearchIndex_.test.res b/__tests__/SearchIndex_.test.res
new file mode 100644
index 000000000..cb394f118
--- /dev/null
+++ b/__tests__/SearchIndex_.test.res
@@ -0,0 +1,541 @@
+open Vitest
+
+// ---------------------------------------------------------------------------
+// maxContentLength
+// ---------------------------------------------------------------------------
+
+describe("maxContentLength", () => {
+ test("is 500", async () => {
+ expect(SearchIndex.maxContentLength)->toBe(500)
+ })
+})
+
+// ---------------------------------------------------------------------------
+// truncate
+// ---------------------------------------------------------------------------
+
+describe("truncate", () => {
+ test("returns string as-is when shorter than maxLen", async () => {
+ expect(SearchIndex.truncate("hello", ~maxLen=10))->toBe("hello")
+ })
+
+ test("returns string as-is when exactly maxLen", async () => {
+ expect(SearchIndex.truncate("hello", ~maxLen=5))->toBe("hello")
+ })
+
+ test("truncates and adds ellipsis when longer than maxLen", async () => {
+ expect(SearchIndex.truncate("hello world", ~maxLen=5))->toBe("hello...")
+ })
+
+ test("handles empty string", async () => {
+ expect(SearchIndex.truncate("", ~maxLen=5))->toBe("")
+ })
+
+ test("truncates to maxLen=0 with ellipsis", async () => {
+ expect(SearchIndex.truncate("abc", ~maxLen=0))->toBe("...")
+ })
+
+ test("truncates to single character with ellipsis", async () => {
+ expect(SearchIndex.truncate("abcdef", ~maxLen=1))->toBe("a...")
+ })
+})
+
+// ---------------------------------------------------------------------------
+// slugify
+// ---------------------------------------------------------------------------
+
+describe("slugify", () => {
+ test("lowercases text", async () => {
+ expect(SearchIndex.slugify("Hello World"))->toBe("hello-world")
+ })
+
+ test("replaces spaces with hyphens", async () => {
+ expect(SearchIndex.slugify("foo bar baz"))->toBe("foo-bar-baz")
+ })
+
+ test("removes non-alphanumeric characters", async () => {
+ expect(SearchIndex.slugify("Hello, World!"))->toBe("hello-world")
+ })
+
+ test("collapses multiple spaces into single hyphen", async () => {
+ expect(SearchIndex.slugify("foo bar"))->toBe("foo-bar")
+ })
+
+ test("handles empty string", async () => {
+ expect(SearchIndex.slugify(""))->toBe("")
+ })
+
+ test("preserves numbers", async () => {
+ expect(SearchIndex.slugify("Section 42"))->toBe("section-42")
+ })
+
+ test("removes special characters like parentheses and dots", async () => {
+ expect(SearchIndex.slugify("Array.map()"))->toBe("arraymap")
+ })
+
+ test("handles already-slugified text", async () => {
+ expect(SearchIndex.slugify("already-slugified"))->toBe("already-slugified")
+ })
+})
+
+// ---------------------------------------------------------------------------
+// stripMdxTags
+// ---------------------------------------------------------------------------
+
+describe("stripMdxTags", () => {
+ test("removes CodeTab blocks", async () => {
+ let input = "before\n
tags",
+ async () => {
+ expect(Search.markdownToHtml("`Array.map`"))->toBe("Array.map")
+ },
+ )
+
+ test(
+ "converts multiple backtick spans",
+ async () => {
+ expect(Search.markdownToHtml("Use `map` and `filter`"))->toBe(
+ "Use map and filter",
+ )
+ },
+ )
+ })
+
+ // --- bold ---
+
+ describe("bold", () => {
+ test(
+ "converts **text** to tags",
+ async () => {
+ expect(Search.markdownToHtml("**important**"))->toBe("important")
+ },
+ )
+
+ test(
+ "converts bold within a sentence",
+ async () => {
+ expect(Search.markdownToHtml("This is **very** important"))->toBe(
+ "This is very important",
+ )
+ },
+ )
+ })
+
+ // --- italic ---
+
+ describe("italic", () => {
+ test(
+ "converts *text* to tags",
+ async () => {
+ expect(Search.markdownToHtml("*emphasis*"))->toBe("emphasis")
+ },
+ )
+
+ test(
+ "converts italic within a sentence",
+ async () => {
+ expect(Search.markdownToHtml("This is *quite* nice"))->toBe("This is quite nice")
+ },
+ )
+ })
+
+ // --- newlines ---
+
+ describe("newlines", () => {
+ test(
+ "converts double newline to
",
+ async () => {
+ expect(Search.markdownToHtml("first\n\nsecond"))->toBe("first
second")
+ },
+ )
+
+ test(
+ "converts triple+ newlines to single
",
+ async () => {
+ expect(Search.markdownToHtml("first\n\n\nsecond"))->toBe("first
second")
+ },
+ )
+
+ test(
+ "converts single newline to space",
+ async () => {
+ expect(Search.markdownToHtml("first\nsecond"))->toBe("first second")
+ },
+ )
+ })
+
+ // --- trimming ---
+
+ describe("trimming", () => {
+ test(
+ "trims leading whitespace",
+ async () => {
+ expect(Search.markdownToHtml(" hello"))->toBe("hello")
+ },
+ )
+
+ test(
+ "trims trailing whitespace",
+ async () => {
+ expect(Search.markdownToHtml("hello "))->toBe("hello")
+ },
+ )
+
+ test(
+ "trims both sides",
+ async () => {
+ expect(Search.markdownToHtml(" hello "))->toBe("hello")
+ },
+ )
+ })
+
+ // --- combined / edge cases ---
+
+ describe("combined transformations", () => {
+ test(
+ "handles empty string",
+ async () => {
+ expect(Search.markdownToHtml(""))->toBe("")
+ },
+ )
+
+ test(
+ "plain text passes through unchanged",
+ async () => {
+ expect(Search.markdownToHtml("just plain text"))->toBe("just plain text")
+ },
+ )
+
+ test(
+ "applies multiple transformations together",
+ async () => {
+ expect(
+ Search.markdownToHtml(
+ "Use `map` on **arrays**.\n\nSee [docs](http://x.com) for *details*.",
+ ),
+ )->toBe(
+ "Use map on arrays.
See docs for details.",
+ )
+ },
+ )
+
+ test(
+ "bold inside code still gets converted (sequential regex application)",
+ async () => {
+ expect(Search.markdownToHtml("`**notbold**`"))->toBe(
+ "notbold",
+ )
+ },
+ )
+ })
+})
+
+// ---------------------------------------------------------------------------
+// isChildHit
+// ---------------------------------------------------------------------------
+
+describe("isChildHit", () => {
+ // --- child-level types (always true) ---
+
+ describe("child-level types", () => {
+ test(
+ "Lvl2 is a child hit",
+ async () => {
+ expect(Search.isChildHit(makeHit(~type_=Lvl2, ~url="https://example.com/page")))->toBe(true)
+ },
+ )
+
+ test(
+ "Lvl3 is a child hit",
+ async () => {
+ expect(Search.isChildHit(makeHit(~type_=Lvl3, ~url="https://example.com/page")))->toBe(true)
+ },
+ )
+
+ test(
+ "Lvl4 is a child hit",
+ async () => {
+ expect(Search.isChildHit(makeHit(~type_=Lvl4, ~url="https://example.com/page")))->toBe(true)
+ },
+ )
+
+ test(
+ "Lvl5 is a child hit",
+ async () => {
+ expect(Search.isChildHit(makeHit(~type_=Lvl5, ~url="https://example.com/page")))->toBe(true)
+ },
+ )
+
+ test(
+ "Lvl6 is a child hit",
+ async () => {
+ expect(Search.isChildHit(makeHit(~type_=Lvl6, ~url="https://example.com/page")))->toBe(true)
+ },
+ )
+
+ test(
+ "Content is a child hit",
+ async () => {
+ expect(Search.isChildHit(makeHit(~type_=Content, ~url="https://example.com/page")))->toBe(
+ true,
+ )
+ },
+ )
+
+ test(
+ "Lvl2 is a child hit even without hash in URL",
+ async () => {
+ expect(Search.isChildHit(makeHit(~type_=Lvl2, ~url="https://example.com/no-hash")))->toBe(
+ true,
+ )
+ },
+ )
+
+ test(
+ "Content is a child hit even with hash in URL",
+ async () => {
+ expect(
+ Search.isChildHit(makeHit(~type_=Content, ~url="https://example.com/page#section")),
+ )->toBe(true)
+ },
+ )
+ })
+
+ // --- Lvl0 ---
+
+ describe("Lvl0", () => {
+ test(
+ "Lvl0 without hash is not a child hit",
+ async () => {
+ expect(Search.isChildHit(makeHit(~type_=Lvl0, ~url="https://example.com/page")))->toBe(
+ false,
+ )
+ },
+ )
+
+ test(
+ "Lvl0 with hash is a child hit",
+ async () => {
+ expect(
+ Search.isChildHit(makeHit(~type_=Lvl0, ~url="https://example.com/page#section")),
+ )->toBe(true)
+ },
+ )
+
+ test(
+ "Lvl0 with hash at end of URL is a child hit",
+ async () => {
+ expect(Search.isChildHit(makeHit(~type_=Lvl0, ~url="https://example.com/page#")))->toBe(
+ true,
+ )
+ },
+ )
+ })
+
+ // --- Lvl1 ---
+
+ describe("Lvl1", () => {
+ test(
+ "Lvl1 without hash is not a child hit",
+ async () => {
+ expect(Search.isChildHit(makeHit(~type_=Lvl1, ~url="https://example.com/page")))->toBe(
+ false,
+ )
+ },
+ )
+
+ test(
+ "Lvl1 with hash is a child hit",
+ async () => {
+ expect(
+ Search.isChildHit(makeHit(~type_=Lvl1, ~url="https://example.com/page#heading")),
+ )->toBe(true)
+ },
+ )
+
+ test(
+ "Lvl1 with deeply nested hash anchor is a child hit",
+ async () => {
+ expect(
+ Search.isChildHit(
+ makeHit(~type_=Lvl1, ~url="https://example.com/docs/manual/api#some-section"),
+ ),
+ )->toBe(true)
+ },
+ )
+
+ test(
+ "Lvl1 with empty URL is not a child hit",
+ async () => {
+ expect(Search.isChildHit(makeHit(~type_=Lvl1, ~url="")))->toBe(false)
+ },
+ )
+ })
+})
diff --git a/__tests__/Url_.test.res b/__tests__/Url_.test.res
new file mode 100644
index 000000000..5cf00796f
--- /dev/null
+++ b/__tests__/Url_.test.res
@@ -0,0 +1,75 @@
+open Vitest
+
+// ---------------------------------------------------------------------------
+// Url.parse – version detection
+// ---------------------------------------------------------------------------
+
+describe("Url.parse version detection", () => {
+ test("parses v-prefixed semver version", async () => {
+ let result = Url.parse("/docs/manual/v12.0.0/introduction")
+ expect(result.version)->toEqual(Url.Version("v12.0.0"))
+ expect(result.base)->toEqual(["docs", "manual"])
+ expect(result.pagepath)->toEqual(["introduction"])
+ expect(result.fullpath)->toEqual(["docs", "manual", "v12.0.0", "introduction"])
+ })
+
+ test("parses version without v prefix matching latest (PR #1231)", async () => {
+ let result = Url.parse("/docs/manual/12.0.0/introduction")
+ // 12.0.0 matches Constants.versions.latest, so it becomes Latest
+ expect(result.version)->toEqual(Url.Latest)
+ expect(result.base)->toEqual(["docs", "manual"])
+ expect(result.pagepath)->toEqual(["introduction"])
+ expect(result.fullpath)->toEqual(["docs", "manual", "12.0.0", "introduction"])
+ })
+
+ test("parses latest keyword", async () => {
+ let result = Url.parse("/docs/manual/latest/arrays")
+ expect(result.version)->toEqual(Url.Latest)
+ expect(result.base)->toEqual(["docs", "manual"])
+ expect(result.pagepath)->toEqual(["arrays"])
+ })
+
+ test("parses 'next' string in URL (does not match env-based Next version)", async () => {
+ // "next" is matched by the regex, but Constants.versions.next is "13.0.0", not "next"
+ let result = Url.parse("/docs/manual/next/arrays")
+ expect(result.version)->toEqual(Url.Version("next"))
+ expect(result.base)->toEqual(["docs", "manual"])
+ expect(result.pagepath)->toEqual(["arrays"])
+ })
+
+ test("parses actual next version from env as Next", async () => {
+ let nextVer = Constants.versions.next
+ let result = Url.parse("/docs/manual/" ++ nextVer ++ "/arrays")
+ expect(result.version)->toEqual(Url.Next)
+ expect(result.base)->toEqual(["docs", "manual"])
+ expect(result.pagepath)->toEqual(["arrays"])
+ })
+
+ test("parses route with no version as NoVersion", async () => {
+ let result = Url.parse("/community/overview")
+ expect(result.version)->toEqual(Url.NoVersion)
+ expect(result.base)->toEqual(["community", "overview"])
+ expect(result.pagepath)->toEqual([])
+ })
+
+ test("parses short v-prefixed version (major.minor)", async () => {
+ let result = Url.parse("/apis/javascript/v7.1/node")
+ expect(result.version)->toEqual(Url.Version("v7.1"))
+ expect(result.base)->toEqual(["apis", "javascript"])
+ expect(result.pagepath)->toEqual(["node"])
+ })
+
+ test("parses short version without v prefix (major.minor, PR #1231)", async () => {
+ let result = Url.parse("/apis/javascript/7.1/node")
+ expect(result.version)->toEqual(Url.Version("7.1"))
+ expect(result.base)->toEqual(["apis", "javascript"])
+ expect(result.pagepath)->toEqual(["node"])
+ })
+
+ test("parses major-only version without v prefix (PR #1231)", async () => {
+ let result = Url.parse("/docs/manual/12/getting-started")
+ expect(result.version)->toEqual(Url.Version("12"))
+ expect(result.base)->toEqual(["docs", "manual"])
+ expect(result.pagepath)->toEqual(["getting-started"])
+ })
+})
diff --git a/__tests__/__screenshots__/SearchIndex_.test.jsx/slugify-collapses-multiple-spaces-into-single-hyphen-1.png b/__tests__/__screenshots__/SearchIndex_.test.jsx/slugify-collapses-multiple-spaces-into-single-hyphen-1.png
new file mode 100644
index 000000000..a35891721
Binary files /dev/null and b/__tests__/__screenshots__/SearchIndex_.test.jsx/slugify-collapses-multiple-spaces-into-single-hyphen-1.png differ
diff --git a/__tests__/__screenshots__/Search_.test.jsx/markdownToHtml-combined-transformations-bold-inside-code-stays-as-is--code-matched-first--1.png b/__tests__/__screenshots__/Search_.test.jsx/markdownToHtml-combined-transformations-bold-inside-code-stays-as-is--code-matched-first--1.png
new file mode 100644
index 000000000..a35891721
Binary files /dev/null and b/__tests__/__screenshots__/Search_.test.jsx/markdownToHtml-combined-transformations-bold-inside-code-stays-as-is--code-matched-first--1.png differ
diff --git a/__tests__/__screenshots__/Search_.test.jsx/markdownToHtml-markdown-link-stripping-handles-link-with-empty-text-1.png b/__tests__/__screenshots__/Search_.test.jsx/markdownToHtml-markdown-link-stripping-handles-link-with-empty-text-1.png
new file mode 100644
index 000000000..a35891721
Binary files /dev/null and b/__tests__/__screenshots__/Search_.test.jsx/markdownToHtml-markdown-link-stripping-handles-link-with-empty-text-1.png differ
diff --git a/__tests__/__screenshots__/Url_.test.jsx/Url-parse-version-detection-parses-next-keyword-1.png b/__tests__/__screenshots__/Url_.test.jsx/Url-parse-version-detection-parses-next-keyword-1.png
new file mode 100644
index 000000000..a35891721
Binary files /dev/null and b/__tests__/__screenshots__/Url_.test.jsx/Url-parse-version-detection-parses-next-keyword-1.png differ
diff --git a/__tests__/__screenshots__/Url_.test.jsx/Url-parse-version-detection-parses-version-without-v-prefix--PR--1231--1.png b/__tests__/__screenshots__/Url_.test.jsx/Url-parse-version-detection-parses-version-without-v-prefix--PR--1231--1.png
new file mode 100644
index 000000000..a35891721
Binary files /dev/null and b/__tests__/__screenshots__/Url_.test.jsx/Url-parse-version-detection-parses-version-without-v-prefix--PR--1231--1.png differ
diff --git a/package.json b/package.json
index f7dbf0389..38c8014e3 100644
--- a/package.json
+++ b/package.json
@@ -14,7 +14,8 @@
"build:generate-llms": "node _scripts/generate_llms.mjs",
"build:res": "rescript build --warn-error +3+8+11+12+26+27+31+32+33+34+35+39+44+45+110",
"build:sync-bundles": "node scripts/sync-playground-bundles.mjs",
- "build:update-index": "yarn build:generate-llms && node _scripts/generate_feed.mjs > public/blog/feed.xml",
+ "build:search-index": "node --env-file-if-exists=.env --env-file-if-exists=.env.local _scripts/generate_search_index.mjs",
+ "build:update-index": "yarn build:generate-llms && node _scripts/generate_feed.mjs > public/blog/feed.xml && yarn build:search-index",
"build:vite": "react-router build",
"build": "yarn build:res && yarn build:scripts && yarn build:update-index && yarn build:vite",
"ci:format": "prettier . --check --experimental-cli",
@@ -55,6 +56,7 @@
"@rescript/react": "^0.14.2",
"@rescript/webapi": "0.1.0-experimental-29db5f4",
"@tsnobip/rescript-lezer": "^0.8.0",
+ "algoliasearch": "^5.50.1",
"docson": "^2.1.0",
"fuse.js": "^6.6.2",
"highlight.js": "^11.11.1",
diff --git a/scripts/generate_search_index.res b/scripts/generate_search_index.res
new file mode 100644
index 000000000..617d5a21f
--- /dev/null
+++ b/scripts/generate_search_index.res
@@ -0,0 +1,223 @@
+// Build script: reads all site content, builds Algolia search records, and uploads them.
+// Runs as a standalone Node script via: node --env-file-if-exists=.env --env-file-if-exists=.env.local _scripts/generate_search_index.mjs
+//
+// Required env vars:
+// ALGOLIA_ADMIN_API_KEY -- API key with addObject/deleteObject/editSettings ACLs
+// ALGOLIA_INDEX_NAME -- e.g. "rescript-lang-dev" or "rescript-lang"
+//
+// If either is missing, the script logs a warning and exits 0 (graceful skip).
+
+let getEnv = (key: string): option<string> =>
+ Node.Process.env
+ ->Dict.get(key)
+ ->Option.flatMap(v =>
+ switch v {
+ | "" => None
+ | s => Some(s)
+ }
+ )
+
+let compareVersions = (a: string, b: string): float => {
+ let parse = (v: string) =>
+ v
+ ->String.replaceRegExp(RegExp.fromString("^v", ~flags=""), "")
+ ->String.split(".")
+ ->Array.map(s => Int.fromString(s)->Option.getOr(0))
+ let partsA = parse(a)
+ let partsB = parse(b)
+ switch (partsA[0], partsB[0]) {
+ | (Some(a0), Some(b0)) if a0 !== b0 => Int.toFloat(a0 - b0)
+ | _ =>
+ switch (partsA[1], partsB[1]) {
+ | (Some(a1), Some(b1)) if a1 !== b1 => Int.toFloat(a1 - b1)
+ | _ =>
+ switch (partsA[2], partsB[2]) {
+ | (Some(a2), Some(b2)) => Int.toFloat(a2 - b2)
+ | _ => 0.0
+ }
+ }
+ }
+}
+
+let resolveApiDir = (): option<string> => {
+ let majorVersion =
+ getEnv("VITE_VERSION_LATEST")
+ ->Option.map(v => v->String.replaceRegExp(RegExp.fromString("^v", ~flags=""), ""))
+ ->Option.flatMap(v => v->String.split(".")->Array.get(0))
+ switch majorVersion {
+ | None => {
+ Console.log("[search-index] VITE_VERSION_LATEST not set, cannot resolve API version.")
+ None
+ }
+ | Some(major) => {
+ let prefix = "v" ++ major ++ "."
+ let entries = Node.Fs.readdirSync("data/api")
+ let matching =
+ entries
+ ->Array.filter(entry => String.startsWith(entry, prefix))
+ ->Array.toSorted(compareVersions)
+ switch matching->Array.at(-1) {
+ | Some(dir) => {
+ Console.log(`[search-index] Resolved API version: ${dir}`)
+ Some("data/api/" ++ dir)
+ }
+ | None => {
+ Console.log(`[search-index] No API version found matching v${major}.*`)
+ None
+ }
+ }
+ }
+ }
+}
+
+let main = async () => {
+ let appId = getEnv("ALGOLIA_APP_ID")
+ let adminApiKey = getEnv("ALGOLIA_ADMIN_API_KEY")
+ let indexName = getEnv("ALGOLIA_INDEX_NAME")
+
+ switch (appId, adminApiKey, indexName) {
+ | (Some(appId), Some(apiKey), Some(idx)) => {
+ Console.log("[search-index] Building search index records...")
+
+ let apiDir = resolveApiDir()->Option.getOr("markdown-pages/docs/api")
+
+ // 1. Build records from all content sources
+ let manualRecords = SearchIndex.buildMarkdownRecords(
+ ~category="Manual",
+ ~basePath="/docs/manual",
+ ~dirPath="markdown-pages/docs/manual",
+ ~pageRank=100,
+ )
+ Console.log(
+ `[search-index] Manual docs: ${Int.toString(Array.length(manualRecords))} records`,
+ )
+
+ let reactRecords = SearchIndex.buildMarkdownRecords(
+ ~category="React",
+ ~basePath="/docs/react",
+ ~dirPath="markdown-pages/docs/react",
+ ~pageRank=90,
+ )
+ Console.log(
+ `[search-index] React docs: ${Int.toString(Array.length(reactRecords))} records`,
+ )
+
+ let communityRecords = SearchIndex.buildMarkdownRecords(
+ ~category="Community",
+ ~basePath="/community",
+ ~dirPath="markdown-pages/community",
+ ~pageRank=50,
+ )
+ Console.log(
+ `[search-index] Community: ${Int.toString(Array.length(communityRecords))} records`,
+ )
+
+ let blogRecords = SearchIndex.buildBlogRecords(~dirPath="markdown-pages/blog", ~pageRank=40)
+ Console.log(`[search-index] Blog: ${Int.toString(Array.length(blogRecords))} records`)
+
+ let syntaxRecords = SearchIndex.buildSyntaxLookupRecords(
+ ~dirPath="markdown-pages/syntax-lookup",
+ ~pageRank=70,
+ )
+ Console.log(
+ `[search-index] Syntax lookup: ${Int.toString(Array.length(syntaxRecords))} records`,
+ )
+
+ let stdlibApiRecords = SearchIndex.buildApiRecords(
+ ~basePath="/docs/manual/api",
+ ~dirPath=apiDir,
+ ~pageRank=80,
+ ~category="API / StdLib",
+ ~files=["stdlib.json"],
+ )
+ Console.log(
+ `[search-index] API / StdLib: ${Int.toString(Array.length(stdlibApiRecords))} records`,
+ )
+
+ let beltApiRecords = SearchIndex.buildApiRecords(
+ ~basePath="/docs/manual/api",
+ ~dirPath=apiDir,
+ ~pageRank=75,
+ ~category="API / Belt",
+ ~files=["belt.json"],
+ )
+ Console.log(
+ `[search-index] API / Belt: ${Int.toString(Array.length(beltApiRecords))} records`,
+ )
+
+ let domApiRecords = SearchIndex.buildApiRecords(
+ ~basePath="/docs/manual/api",
+ ~dirPath=apiDir,
+ ~pageRank=70,
+ ~category="API / DOM",
+ ~files=["dom.json"],
+ )
+ Console.log(
+ `[search-index] API / DOM: ${Int.toString(Array.length(domApiRecords))} records`,
+ )
+
+ // 2. Concatenate all records
+ let allRecords =
+ [
+ manualRecords,
+ reactRecords,
+ communityRecords,
+ blogRecords,
+ syntaxRecords,
+ stdlibApiRecords,
+ beltApiRecords,
+ domApiRecords,
+ ]->Array.flat
+
+ let totalCount = Array.length(allRecords)
+ Console.log(`[search-index] Total: ${Int.toString(totalCount)} records`)
+
+ // 3. Convert to JSON for Algolia
+ let jsonRecords = allRecords->Array.map(SearchIndex.toJson)
+
+ // 4. Initialize Algolia client and upload
+ let client = Algolia.make(appId, apiKey)
+
+ Console.log(`[search-index] Uploading to index "${idx}"...`)
+ let _ = await client->Algolia.replaceAllObjects({
+ indexName: idx,
+ objects: jsonRecords,
+ batchSize: 1000,
+ })
+ Console.log("[search-index] Records uploaded successfully.")
+
+ // 5. Configure index settings
+ Console.log("[search-index] Updating index settings...")
+ let _ = await client->Algolia.setSettings({
+ indexName: idx,
+ indexSettings: {
+ searchableAttributes: [
+ "hierarchy.lvl0",
+ "hierarchy.lvl1",
+ "hierarchy.lvl2",
+ "hierarchy.lvl3",
+ "hierarchy.lvl4",
+ "hierarchy.lvl5",
+ "hierarchy.lvl6",
+ "content",
+ ],
+ ranking: ["typo", "words", "attribute", "exact", "custom", "proximity", "filters"],
+ exactOnSingleWordQuery: "word",
+ attributesForFaceting: ["type"],
+ customRanking: ["desc(weight.pageRank)", "desc(weight.level)", "asc(weight.position)"],
+ attributesToSnippet: [],
+ attributeForDistinct: "hierarchy.lvl0",
+ },
+ })
+ Console.log("[search-index] Index settings updated.")
+
+ Console.log("[search-index] Done.")
+ }
+ | (None, _, _) => Console.log("[search-index] ALGOLIA_APP_ID not set, skipping index upload.")
+ | (_, None, _) =>
+ Console.log("[search-index] ALGOLIA_ADMIN_API_KEY not set, skipping index upload.")
+ | (_, _, None) => Console.log("[search-index] ALGOLIA_INDEX_NAME not set, skipping index upload.")
+ }
+}
+
+let _ = main()
diff --git a/src/bindings/Algolia.res b/src/bindings/Algolia.res
new file mode 100644
index 000000000..30cf205ca
--- /dev/null
+++ b/src/bindings/Algolia.res
@@ -0,0 +1,54 @@
+// Bindings for algoliasearch v5 SDK
+// https://github.com/algolia/algoliasearch-client-javascript
+
+module SearchClient = {
+ type t
+}
+
+module BatchResponse = {
+ type t
+}
+
+module SetSettingsResponse = {
+ type t
+}
+
+module IndexSettings = {
+ type t = {
+ searchableAttributes?: array<string>,
+ attributesForFaceting?: array<string>,
+ customRanking?: array<string>,
+ ranking?: array<string>,
+ attributesToSnippet?: array<string>,
+ attributeForDistinct?: string,
+ exactOnSingleWordQuery?: string,
+ }
+}
+
+module ReplaceAllObjectsOptions = {
+ type t = {
+ indexName: string,
+ objects: array<JSON.t>,
+ batchSize?: int,
+ }
+}
+
+module SetSettingsOptions = {
+ type t = {
+ indexName: string,
+ indexSettings: IndexSettings.t,
+ }
+}
+
+@module("algoliasearch")
+external make: (string, string) => SearchClient.t = "algoliasearch"
+
+@send
+external replaceAllObjects: (
+ SearchClient.t,
+ ReplaceAllObjectsOptions.t,
+) => promise<array<BatchResponse.t>> = "replaceAllObjects"
+
+@send
+external setSettings: (SearchClient.t, SetSettingsOptions.t) => promise<SetSettingsResponse.t> =
+ "setSettings"
diff --git a/src/bindings/DocSearch.res b/src/bindings/DocSearch.res
index 0c42d8586..a97c2540f 100644
--- a/src/bindings/DocSearch.res
+++ b/src/bindings/DocSearch.res
@@ -44,7 +44,12 @@ type item = {itemUrl: string}
type navigator = {navigate: item => unit}
-type searchParameters = {facetFilters: array<string>}
+type searchParameters = {
+ facetFilters?: array<string>,
+ hitsPerPage?: int,
+ distinct?: int,
+ attributesToSnippet?: array<string>,
+}
@module("@docsearch/react") @react.component
external make: (
diff --git a/src/bindings/Env.res b/src/bindings/Env.res
index 29a6c4e1b..174ec8da9 100644
--- a/src/bindings/Env.res
+++ b/src/bindings/Env.res
@@ -9,3 +9,8 @@ let root_url = switch deployment_url {
| Some(url) => url
| None => dev ? "http://localhost:5173/" : "https://rescript-lang.org/"
}
+
+// Algolia search configuration (read from .env via Vite)
+external algolia_app_id: string = "import.meta.env.VITE_ALGOLIA_APP_ID"
+external algolia_read_api_key: string = "import.meta.env.VITE_ALGOLIA_READ_API_KEY"
+external algolia_index_name: string = "import.meta.env.VITE_ALGOLIA_INDEX_NAME"
diff --git a/src/bindings/Vitest.res b/src/bindings/Vitest.res
index b8f20fef8..c6b7cbe74 100644
--- a/src/bindings/Vitest.res
+++ b/src/bindings/Vitest.res
@@ -9,6 +9,9 @@ type mock
@module("vitest")
external test: (string, unit => promise<unit>) => unit = "test"
+@module("vitest")
+external describe: (string, unit => unit) => unit = "describe"
+
@module("vitest") @scope("vi")
external fn: unit => 'a => 'b = "fn"
external click: element => promise<unit> = "click"
@send
external toBe: (expect, 'a) => unit = "toBe"
+@send
+external toEqual: (expect, 'a) => unit = "toEqual"
+
@send
external toHaveBeenCalled: expect => unit = "toHaveBeenCalled"
diff --git a/src/common/SearchIndex.res b/src/common/SearchIndex.res
new file mode 100644
index 000000000..79d4260fa
--- /dev/null
+++ b/src/common/SearchIndex.res
@@ -0,0 +1,505 @@
+type hierarchy = {
+ lvl0: string,
+ lvl1: string,
+ lvl2: option<string>,
+ lvl3: option<string>,
+ lvl4: option<string>,
+ lvl5: option<string>,
+ lvl6: option<string>,
+}
+
+type weight = {
+ pageRank: int,
+ level: int,
+ position: int,
+}
+
+type record = {
+ objectID: string,
+ url: string,
+ url_without_anchor: string,
+ anchor: option<string>,
+ content: option<string>,
+ @as("type") type_: string,
+ hierarchy: hierarchy,
+ weight: weight,
+}
+
+type heading = {
+ level: int,
+ text: string,
+ content: string,
+}
+
+let maxContentLength = 500
+
+let makeHierarchy = (~lvl0, ~lvl1, ~lvl2=?, ~lvl3=?, ~lvl4=?, ~lvl5=?, ~lvl6=?, ()) => {
+ lvl0,
+ lvl1,
+ lvl2,
+ lvl3,
+ lvl4,
+ lvl5,
+ lvl6,
+}
+
+let truncate = (str: string, ~maxLen: int): string =>
+ switch String.length(str) > maxLen {
+ | true => String.slice(str, ~start=0, ~end=maxLen) ++ "..."
+ | false => str
+ }
+
+// --- Helpers ---
+
+let slugify = (text: string): string =>
+ text
+ ->String.toLowerCase
+ ->String.replaceRegExp(RegExp.fromString("\\s+", ~flags="g"), "-")
+ ->String.replaceRegExp(RegExp.fromString("[^a-z0-9\\-]", ~flags="g"), "")
+
+let stripMdxTags = (text: string): string =>
+ text
+ ->String.replaceRegExp(RegExp.fromString("<CodeTab[\\s\\S]*?</CodeTab>", ~flags="g"), "")
+ ->String.replaceRegExp(RegExp.fromString("<[^>]+>", ~flags="g"), "")
+ ->String.replaceRegExp(RegExp.fromString("```[\\s\\S]*?```", ~flags="g"), "")
+ ->String.replaceRegExp(RegExp.fromString("`([^`]+)`", ~flags="g"), "$1")
+ ->String.replaceRegExp(RegExp.fromString("\\*\\*([^*]+)\\*\\*", ~flags="g"), "$1")
+ ->String.replaceRegExp(RegExp.fromString("\\*([^*]+)\\*", ~flags="g"), "$1")
+ ->String.replaceRegExp(RegExp.fromString("\\[([^\\]]+)\\]\\([^)]*\\)", ~flags="g"), "$1")
+ ->String.replaceRegExp(RegExp.fromString("^#{1,6}\\s+", ~flags="gm"), "")
+ ->String.replaceRegExp(RegExp.fromString("\\n{2,}", ~flags="g"), "\n")
+ ->String.trim
+
+let cleanDocstring = (text: string): string =>
+ text
+ // Take content before first heading
+ ->String.split("\n## ")
+ ->Array.get(0)
+ ->Option.getOr(text)
+ // Take content before first code block
+ ->String.split("\n```")
+ ->Array.get(0)
+ ->Option.getOr(text)
+ // Strip inline code backticks
+ ->String.replaceRegExp(RegExp.fromString("`([^`]+)`", ~flags="g"), "$1")
+ // Strip bold
+ ->String.replaceRegExp(RegExp.fromString("\\*\\*([^*]+)\\*\\*", ~flags="g"), "$1")
+ // Strip italic
+ ->String.replaceRegExp(RegExp.fromString("\\*([^*]+)\\*", ~flags="g"), "$1")
+ // Strip links
+ ->String.replaceRegExp(RegExp.fromString("\\[([^\\]]+)\\]\\([^)]*\\)", ~flags="g"), "$1")
+ // Collapse multiple newlines into space
+ ->String.replaceRegExp(RegExp.fromString("\\n{2,}", ~flags="g"), " ")
+ // Replace remaining newlines with space
+ ->String.replaceRegExp(RegExp.fromString("\\n", ~flags="g"), " ")
+ ->String.trim
+
+let extractIntro = (content: string): string => {
+ let parts = content->String.split("\n## ")
+ let intro = parts[0]->Option.getOr("")
+ intro
+ // Remove the # H1 heading line if present at the start
+ ->String.replaceRegExp(RegExp.fromString("^#[^#].*\\n", ~flags=""), "")
+ ->stripMdxTags
+ ->String.trim
+}
+
+let findHeadingMatches: string => array<{..}> = %raw(`
+ function(content) {
+ var regex = /^(#{2,6})\s+(.+)$/gm;
+ var results = [];
+ var match;
+ while ((match = regex.exec(content)) !== null) {
+ results.push({ index: match.index, level: match[1].length, text: match[2] });
+ }
+ return results;
+ }
+`)
+
+let extractHeadings = (content: string): array<heading> => {
+ let matches = findHeadingMatches(content)
+
+ matches->Array.mapWithIndex((m, i) => {
+ let startIdx = m["index"] + String.length(m["text"]) + m["level"] + 2
+ let endIdx = switch matches[i + 1] {
+ | Some(next) => next["index"]
+ | None => String.length(content)
+ }
+ let sectionContent =
+ content
+ ->String.slice(~start=startIdx, ~end=endIdx)
+ ->stripMdxTags
+ ->String.trim
+ ->truncate(~maxLen=maxContentLength)
+
+ {
+ level: m["level"],
+ text: m["text"],
+ content: sectionContent,
+ }
+ })
+}
+
+// --- File collection ---
+
+let rec collectFiles = (dirPath: string): array<string> => {
+ let entries = Node.Fs.readdirSync(dirPath)
+ entries->Array.reduce([], (acc, entry) => {
+ let fullPath = Node.Path.join([dirPath, entry])
+ let stats = Node.Fs.statSync(fullPath)
+ switch stats["isDirectory"]() {
+ | true => acc->Array.concat(collectFiles(fullPath))
+ | false => {
+ acc->Array.push(fullPath)
+ acc
+ }
+ }
+ })
+}
+
+let isMdxFile = (path: string): bool => Node.Path.extname(path) === ".mdx"
+
+let filenameWithoutExt = (path: string): string =>
+ Node.Path.basename(path)->String.replace(".mdx", "")
+
+// --- Record builders ---
+
+let buildMarkdownRecords = (
+ ~category: string,
+ ~basePath: string,
+ ~dirPath: string,
+ ~pageRank: int,
+): array<record> => {
+ collectFiles(dirPath)
+ ->Array.filter(isMdxFile)
+ ->Array.flatMap(filePath => {
+ let fileContent = Node.Fs.readFileSync2(filePath, "utf8")
+ let parsed = MarkdownParser.parseSync(fileContent)
+
+ switch DocFrontmatter.decode(parsed.frontmatter) {
+ | None => []
+ | Some(fm) => {
+ let pageUrl = switch fm.canonical->Null.toOption {
+ | Some(canonical) => canonical
+ | None => basePath ++ "/" ++ filenameWithoutExt(filePath)
+ }
+
+ let introText = parsed.content->extractIntro->truncate(~maxLen=maxContentLength)
+ let pageContent = switch introText {
+ | "" => fm.description->Null.toOption->Option.getOr("")
+ | text => text
+ }
+
+ let pageRecord = {
+ objectID: pageUrl,
+ url: pageUrl,
+ url_without_anchor: pageUrl,
+ anchor: None,
+ content: Some(pageContent->truncate(~maxLen=maxContentLength)),
+ type_: "lvl1",
+ hierarchy: makeHierarchy(~lvl0=category, ~lvl1=fm.title, ()),
+ weight: {pageRank, level: 100, position: 0},
+ }
+
+ let headingRecords =
+ parsed.content
+ ->extractHeadings
+ ->Array.mapWithIndex((heading, i) => {
+ let anchor = slugify(heading.text)
+ let headingUrl = pageUrl ++ "#" ++ anchor
+ let typeLvl = switch heading.level {
+ | 2 => "lvl2"
+ | 3 => "lvl3"
+ | 4 => "lvl4"
+ | 5 => "lvl5"
+ | _ => "lvl6"
+ }
+ let weightLevel = switch heading.level {
+ | 2 => 80
+ | 3 => 60
+ | 4 => 40
+ | 5 => 20
+ | _ => 10
+ }
+ let hierarchy = switch heading.level {
+ | 2 => makeHierarchy(~lvl0=category, ~lvl1=fm.title, ~lvl2=heading.text, ())
+ | 3 =>
+ makeHierarchy(
+ ~lvl0=category,
+ ~lvl1=fm.title,
+ ~lvl2=heading.text,
+ ~lvl3=heading.text,
+ (),
+ )
+ | 4 =>
+ makeHierarchy(
+ ~lvl0=category,
+ ~lvl1=fm.title,
+ ~lvl2=heading.text,
+ ~lvl3=heading.text,
+ ~lvl4=heading.text,
+ (),
+ )
+ | _ => makeHierarchy(~lvl0=category, ~lvl1=fm.title, ~lvl2=heading.text, ())
+ }
+
+ {
+ objectID: headingUrl,
+ url: headingUrl,
+ url_without_anchor: pageUrl,
+ anchor: Some(anchor),
+ content: switch heading.content {
+ | "" => None
+ | c => Some(c)
+ },
+ type_: typeLvl,
+ hierarchy,
+ weight: {pageRank, level: weightLevel, position: i + 1},
+ }
+ })
+
+ [pageRecord]->Array.concat(headingRecords)
+ }
+ }
+ })
+}
+
+let buildBlogRecords = (~dirPath: string, ~pageRank: int): array<record> => {
+ open JSON
+ Node.Fs.readdirSync(dirPath)
+ ->Array.filter(entry => isMdxFile(entry) && entry !== "archived")
+ ->Array.filterMap(entry => {
+ let fullPath = Node.Path.join([dirPath, entry])
+ let stats = Node.Fs.statSync(fullPath)
+ switch stats["isDirectory"]() {
+ | true => None
+ | false => {
+ let fileContent = Node.Fs.readFileSync2(fullPath, "utf8")
+ let parsed = MarkdownParser.parseSync(fileContent)
+
+ switch parsed.frontmatter {
+ | Object(dict{"title": String(title), "description": ?description}) => {
+ let slug = filenameWithoutExt(fullPath)
+ let url = "/blog/" ++ slug
+ let desc = switch description {
+ | Some(String(d)) => Some(d->truncate(~maxLen=maxContentLength))
+ | _ => None
+ }
+
+ Some({
+ objectID: url,
+ url,
+ url_without_anchor: url,
+ anchor: None,
+ content: desc,
+ type_: "lvl1",
+ hierarchy: makeHierarchy(~lvl0="Blog", ~lvl1=title, ()),
+ weight: {pageRank, level: 100, position: 0},
+ })
+ }
+ | _ => None
+ }
+ }
+ }
+ })
+}
+
+let buildSyntaxLookupRecords = (~dirPath: string, ~pageRank: int): array<record> => {
+ open JSON
+ Node.Fs.readdirSync(dirPath)
+ ->Array.filter(isMdxFile)
+ ->Array.filterMap(entry => {
+ let fullPath = Node.Path.join([dirPath, entry])
+ let fileContent = Node.Fs.readFileSync2(fullPath, "utf8")
+ let parsed = MarkdownParser.parseSync(fileContent)
+
+ switch parsed.frontmatter {
+ | Object(dict{
+ "id": String(id),
+ "name": String(name),
+ "summary": String(summary),
+ "keywords": ?_keywords,
+ }) =>
+ Some({
+ objectID: "syntax-" ++ id,
+ url: "/syntax-lookup",
+ url_without_anchor: "/syntax-lookup",
+ anchor: None,
+ content: Some(summary->truncate(~maxLen=maxContentLength)),
+ type_: "lvl1",
+ hierarchy: makeHierarchy(~lvl0="Syntax", ~lvl1=name, ()),
+ weight: {pageRank, level: 100, position: 0},
+ })
+ | _ => None
+ }
+ })
+}
+
+let buildApiRecords = (
+ ~basePath: string,
+ ~dirPath: string,
+ ~pageRank: int,
+ ~category: string,
+  ~files: option<array<string>>=?,
+): array<record> => {
+ open JSON
+ Node.Fs.readdirSync(dirPath)
+ ->Array.filter(entry => {
+ let isJson = String.endsWith(entry, ".json") && entry !== "toc_tree.json"
+ switch files {
+ | Some(allowed) => isJson && allowed->Array.includes(entry)
+ | None => isJson
+ }
+ })
+ ->Array.flatMap(entry => {
+ let fullPath = Node.Path.join([dirPath, entry])
+ let fileContent = Node.Fs.readFileSync2(fullPath, "utf8")
+
+ switch JSON.parseOrThrow(fileContent) {
+ | Object(modules) =>
+ modules
+ ->Dict.toArray
+ ->Array.flatMap(((key, moduleJson)) => {
+ switch moduleJson {
+ | Object(dict{
+ "id": String(id),
+ "name": String(name),
+ "docstrings": Array(docstrings),
+ "items": Array(items),
+ }) => {
+ let moduleUrl = basePath ++ "/" ++ key
+ let moduleDocstring = switch docstrings[0] {
+ | Some(String(d)) => Some(d->cleanDocstring->truncate(~maxLen=maxContentLength))
+ | _ => None
+ }
+
+ let moduleRecord = {
+ objectID: id,
+ url: moduleUrl,
+ url_without_anchor: moduleUrl,
+ anchor: None,
+ content: moduleDocstring,
+ type_: "lvl1",
+ hierarchy: makeHierarchy(~lvl0=category, ~lvl1=name, ()),
+ weight: {pageRank, level: 90, position: 0},
+ }
+
+ let sortedItems = items->Array.toSorted(
+ (a, b) => {
+ switch (a, b) {
+ | (Object(dict{"name": String(nameA)}), Object(dict{"name": String(nameB)})) =>
+ nameA->String.localeCompare(nameB)
+ | _ => 0.
+ }
+ },
+ )
+
+ let itemRecords = sortedItems->Array.filterMapWithIndex(
+ (item, i) => {
+ switch item {
+ | Object(dict{
+ "id": String(itemId),
+ "name": String(itemName),
+ "docstrings": Array(itemDocstrings),
+ "signature": ?signature,
+ "kind": String(kind),
+ }) => {
+ let kindPrefix = switch kind {
+ | "type" => "type-"
+ | _ => "value-"
+ }
+ let itemAnchor = kindPrefix ++ itemName
+ let itemUrl = moduleUrl ++ "#" ++ itemAnchor
+ let qualifiedName = name ++ "." ++ itemName
+ let docstringIntro = switch itemDocstrings[0] {
+ | Some(String(d)) if String.length(d) > 0 => {
+ // Take content before first heading or code block
+ let intro =
+ d
+ ->String.split("\n## ")
+ ->Array.get(0)
+ ->Option.getOr(d)
+ ->String.split("\n```")
+ ->Array.get(0)
+ ->Option.getOr(d)
+ ->String.trim
+ Some(intro->truncate(~maxLen=2000))
+ }
+ | _ => None
+ }
+ let content = switch docstringIntro {
+ | Some(d) if String.length(d) > 0 => Some(d)
+ | _ =>
+ switch signature {
+ | Some(String(s)) => Some(s)
+ | _ => None
+ }
+ }
+
+ Some({
+ objectID: itemId,
+ url: itemUrl,
+ url_without_anchor: moduleUrl,
+ anchor: Some(itemAnchor),
+ content,
+ type_: "lvl1",
+ hierarchy: makeHierarchy(~lvl0=category, ~lvl1=qualifiedName, ()),
+ weight: {pageRank, level: 70, position: i},
+ })
+ }
+ | _ => None
+ }
+ },
+ )
+
+ [moduleRecord]->Array.concat(itemRecords)
+ }
+ | _ => []
+ }
+ })
+ | _ => []
+ | exception _ => []
+ }
+ })
+}
+
+// --- JSON serialization ---
+
+let optionToJson = (opt: option<string>): JSON.t =>
+ switch opt {
+ | Some(s) => JSON.String(s)
+ | None => JSON.Null
+ }
+
+let hierarchyToJson = (h: hierarchy): JSON.t => {
+ let dict = Dict.make()
+ dict->Dict.set("lvl0", JSON.String(h.lvl0))
+ dict->Dict.set("lvl1", JSON.String(h.lvl1))
+ dict->Dict.set("lvl2", optionToJson(h.lvl2))
+ dict->Dict.set("lvl3", optionToJson(h.lvl3))
+ dict->Dict.set("lvl4", optionToJson(h.lvl4))
+ dict->Dict.set("lvl5", optionToJson(h.lvl5))
+ dict->Dict.set("lvl6", optionToJson(h.lvl6))
+ JSON.Object(dict)
+}
+
+let weightToJson = (w: weight): JSON.t => {
+ let dict = Dict.make()
+ dict->Dict.set("pageRank", JSON.Number(Int.toFloat(w.pageRank)))
+ dict->Dict.set("level", JSON.Number(Int.toFloat(w.level)))
+ dict->Dict.set("position", JSON.Number(Int.toFloat(w.position)))
+ JSON.Object(dict)
+}
+
+let toJson = (r: record): JSON.t => {
+ let dict = Dict.make()
+ dict->Dict.set("objectID", JSON.String(r.objectID))
+ dict->Dict.set("url", JSON.String(r.url))
+ dict->Dict.set("url_without_anchor", JSON.String(r.url_without_anchor))
+ dict->Dict.set("anchor", optionToJson(r.anchor))
+ dict->Dict.set("content", optionToJson(r.content))
+ dict->Dict.set("type", JSON.String(r.type_))
+ dict->Dict.set("hierarchy", hierarchyToJson(r.hierarchy))
+ dict->Dict.set("weight", weightToJson(r.weight))
+ JSON.Object(dict)
+}
diff --git a/src/common/SearchIndex.resi b/src/common/SearchIndex.resi
new file mode 100644
index 000000000..435e81eb2
--- /dev/null
+++ b/src/common/SearchIndex.resi
@@ -0,0 +1,84 @@
+type hierarchy = {
+ lvl0: string,
+ lvl1: string,
+  lvl2: option<string>,
+  lvl3: option<string>,
+  lvl4: option<string>,
+  lvl5: option<string>,
+}
+
+type weight = {
+ pageRank: int,
+ level: int,
+ position: int,
+}
+
+type record = {
+ objectID: string,
+ url: string,
+ url_without_anchor: string,
+  anchor: option<string>,
+  content: option<string>,
+ @as("type") type_: string,
+ hierarchy: hierarchy,
+ weight: weight,
+}
+
+type heading = {
+ level: int,
+ text: string,
+ content: string,
+}
+
+let maxContentLength: int
+
+let makeHierarchy: (
+ ~lvl0: string,
+ ~lvl1: string,
+ ~lvl2: string=?,
+ ~lvl3: string=?,
+ ~lvl4: string=?,
+ ~lvl5: string=?,
+ ~lvl6: string=?,
+ unit,
+) => hierarchy
+
+let truncate: (string, ~maxLen: int) => string
+
+let slugify: string => string
+
+let stripMdxTags: string => string
+
+let cleanDocstring: string => string
+
+let extractIntro: string => string
+
+let extractHeadings: string => array<heading>
+
+let optionToJson: option<string> => JSON.t
+
+let hierarchyToJson: hierarchy => JSON.t
+
+let weightToJson: weight => JSON.t
+
+let buildMarkdownRecords: (
+ ~category: string,
+ ~basePath: string,
+ ~dirPath: string,
+ ~pageRank: int,
+) => array<record>
+
+let buildBlogRecords: (~dirPath: string, ~pageRank: int) => array<record>
+
+let buildSyntaxLookupRecords: (~dirPath: string, ~pageRank: int) => array<record>
+
+let buildApiRecords: (
+ ~basePath: string,
+ ~dirPath: string,
+ ~pageRank: int,
+ ~category: string,
+  ~files: array<string>=?,
+) => array<record>
+
+let toJson: record => JSON.t
diff --git a/src/common/Url.res b/src/common/Url.res
index fb31e28cd..0c7538b6d 100644
--- a/src/common/Url.res
+++ b/src/common/Url.res
@@ -58,7 +58,7 @@ let prettyString = (str: string) => {
let parse = (route: string): t => {
let fullpath = route->String.split("/")->Array.filter(s => s !== "")
let foundVersionIndex = Array.findIndex(fullpath, chunk => {
- RegExp.test(/latest|next|v\d+(\.\d+)?(\.\d+)?/, chunk)
+    RegExp.test(/^(latest|next|v?\d+(\.\d+)?(\.\d+)?)$/, chunk)
})
let (version, base, pagepath) = if foundVersionIndex == -1 {
diff --git a/src/components/Icon.res b/src/components/Icon.res
index daac2bdf4..7830a805d 100644
--- a/src/components/Icon.res
+++ b/src/components/Icon.res
@@ -291,3 +291,84 @@ module Clipboard = {
}
+
+module DocPage = {
+ @react.component
+ let make = () =>
+
+
+
+}
+
+module DocHash = {
+ @react.component
+ let make = () =>
+
+
+
+}
+
+module DocTree = {
+ @react.component
+ let make = () =>
+
+}
+
+module DocSelect = {
+ @react.component
+ let make = () =>
+
+
+
+}
diff --git a/src/components/Icon.resi b/src/components/Icon.resi
index 4087c13b6..df1f0e24b 100644
--- a/src/components/Icon.resi
+++ b/src/components/Icon.resi
@@ -82,3 +82,23 @@ module Clipboard: {
@react.component
let make: (~className: string=?) => React.element
}
+
+module DocPage: {
+ @react.component
+ let make: unit => React.element
+}
+
+module DocHash: {
+ @react.component
+ let make: unit => React.element
+}
+
+module DocTree: {
+ @react.component
+ let make: unit => React.element
+}
+
+module DocSelect: {
+ @react.component
+ let make: unit => React.element
+}
diff --git a/src/components/Meta.res b/src/components/Meta.res
index 3479d57f5..903af53c8 100644
--- a/src/components/Meta.res
+++ b/src/components/Meta.res
@@ -65,8 +65,6 @@ let make = (
- // Docsearch meta tags
-
// Robots meta tag
>
diff --git a/src/components/Search.res b/src/components/Search.res
index b9ccbb103..4da1eb36d 100644
--- a/src/components/Search.res
+++ b/src/components/Search.res
@@ -1,103 +1,94 @@
-let apiKey = "a2485ef172b8cd82a2dfa498d551399b"
-let indexName = "rescript-lang"
-let appId = "S32LNEY41T"
+let apiKey = Env.algolia_read_api_key
+let indexName = Env.algolia_index_name
+let appId = Env.algolia_app_id
type state = Active | Inactive
-let hit = ({hit, children}: DocSearch.hitComponent) => {
- let toTitle = str => str->String.charAt(0)->String.toUpperCase ++ String.slice(str, ~start=1)
-
- let description = switch hit.url
- ->String.split("/")
- ->Array.slice(~start=1)
- ->List.fromArray {
- | list{"blog" as r | "community" as r, ..._} => r->toTitle
- | list{"docs", doc, version, ...rest} =>
- let path = rest->List.toArray
-
- let info =
- path
- ->Array.slice(~start=0, ~end=Array.length(path) - 1)
- ->Array.map(path =>
- switch path {
- | "api" => "API"
- | other => toTitle(other)
- }
- )
-
- [doc->toTitle, version->toTitle]->Array.concat(info)->Array.join(" / ")
- | _ => ""
- }
-
- let isDeprecated = hit.deprecated->Option.isSome
- let deprecatedBadge = isDeprecated
- ?
- {"Deprecated"->React.string}
-
- : React.null
-
-
-
- {deprecatedBadge}
- {description->React.string}
-
- children
-
+let navigator: DocSearch.navigator = {
+ navigate: ({itemUrl}) => {
+ ReactRouter.navigate(itemUrl)
+ },
}
-let transformItems = (items: DocSearch.transformItems) => {
- items
- ->Array.filterMap(item => {
- let url = try WebAPI.URL.make(~url=item.url)->Some catch {
- | JsExn(obj) =>
- Console.error2(`Failed to parse URL ${item.url}`, obj)
- None
- }
- switch url {
- | Some({pathname, hash}) =>
- RegExp.test(/v(8|9|10|11)\./, pathname)
- ? None
- : {
- // DocSearch internally calls .replace() on hierarchy.lvl1, so we must
- // provide a fallback for items where lvl1 is null to prevent crashes
- let hierarchy = item.hierarchy
- let lvl0 = hierarchy.lvl0->Nullable.toOption->Option.getOr("")
- let lvl1 = hierarchy.lvl1->Nullable.toOption->Option.getOr(lvl0)
- Some({
- ...item,
- deprecated: pathname->String.includes("api/js") ||
- pathname->String.includes("api/core")
- ? Some("Deprecated")
- : None,
- url: pathname->String.replace("/v12.0.0/", "/") ++ hash,
- hierarchy: {
- ...hierarchy,
- lvl0: Nullable.make(lvl0),
- lvl1: Nullable.make(lvl1),
- },
- })
- }
-
- | None => None
- }
- })
- // Sort deprecated items to the end
- ->Array.toSorted((a, b) => {
- switch (a.deprecated, b.deprecated) {
- | (Some(_), None) => 1. // a is deprecated, b is not - put a after b
- | (None, Some(_)) => -1. // a is not deprecated, b is - put a before b
- | _ => 0.
- }
- })
- ->Array.toSorted((a, b) => {
- switch (a.url->String.includes("api/stdlib"), b.url->String.includes("api/stdlib")) {
- | (true, false) => -1. // a is a stdlib doc, b is not - put a before b
- | (false, true) => 1. // a is not a stdlib doc, b is - put a after b
- | _ => 0. // both same API status - maintain original order
+let getHighlightedTitle: DocSearch.docSearchHit => string = %raw(`
+ function(hit) {
+ var type = hit.type;
+ var h = hit._highlightResult && hit._highlightResult.hierarchy;
+ var raw = hit.hierarchy;
+ try {
+ if (type && type !== 'lvl1' && type !== 'lvl0') {
+ var lvl = h && h[type] && h[type].value;
+ if (lvl) return lvl;
+ }
+ if (h && h.lvl1 && h.lvl1.value) return h.lvl1.value;
+ } catch(e) {}
+ return (raw && raw.lvl1) || '';
+ }
+`)
+
+let getSubtitle: DocSearch.docSearchHit => option<string> = %raw(`
+ function(hit) {
+ var type = hit.type;
+ if (type && type !== 'lvl1' && type !== 'lvl0') {
+ var raw = hit.hierarchy;
+ if (raw && raw.lvl1) return raw.lvl1;
}
- })
+ return undefined;
+ }
+`)
+
+let markdownToHtml = (text: string): string =>
+ text
+ // Strip stray backslashes from MDX processing
+ ->String.replaceRegExp(RegExp.fromString("^\\\\\\s+", ~flags=""), "")
+ ->String.replaceRegExp(RegExp.fromString("\\\\\\s+", ~flags="g"), " ")
+ ->String.replaceRegExp(
+ RegExp.fromString("See\\s+\\[([^\\]]+)\\]\\([^)]*\\)\\s+on MDN\\.?", ~flags="g"),
+ "",
+ )
+ ->String.replaceRegExp(RegExp.fromString("See\\s+\\S+\\s+on MDN\\.?", ~flags="g"), "")
+ ->String.replaceRegExp(RegExp.fromString("\\[([^\\]]+)\\]\\([^)]*\\)", ~flags="g"), "$1")
+ ->String.replaceRegExp(RegExp.fromString("\\x60([^\\x60]+)\\x60", ~flags="g"), "$1")
+ ->String.replaceRegExp(
+ RegExp.fromString("\\*\\*([^*]+)\\*\\*", ~flags="g"),
+ "$1",
+ )
+ ->String.replaceRegExp(RegExp.fromString("\\*([^*]+)\\*", ~flags="g"), "$1")
+ ->String.replaceRegExp(RegExp.fromString("\\n{2,}", ~flags="g"), "
")
+ ->String.replaceRegExp(RegExp.fromString("\\n", ~flags="g"), " ")
+ ->String.trim
+
+let isChildHit = (hit: DocSearch.docSearchHit) =>
+ switch hit.type_ {
+ | Lvl2 | Lvl3 | Lvl4 | Lvl5 | Lvl6 | Content => true
+ | Lvl0 | Lvl1 => hit.url->String.includes("#")
+ }
+
+let hitComponent = ({hit, children: _}: DocSearch.hitComponent): React.element => {
+ let titleHtml = getHighlightedTitle(hit)
+ let subtitle = getSubtitle(hit)
+ let contentHtml = hit.content->Nullable.toOption->Option.map(markdownToHtml)
+ let isChild = isChildHit(hit)
+
+
+
+ {isChild ? : React.null}
+ {isChild ? : }
+
+
+ {switch subtitle {
+ | Some(s) => {React.string(s)}
+ | None => React.null
+ }}
+ {switch contentHtml {
+ | Some(c) if String.length(c) > 0 =>
+
+ | _ => React.null
+ }}
+
+
+
+
}
@react.component
@@ -140,7 +131,6 @@ let make = () => {
switch e.key {
| "/" => focusSearch(e)
| "k" if e.ctrlKey || e.metaKey => focusSearch(e)
- | "Escape" => handleCloseModal()
| _ => ()
}
}
@@ -174,10 +164,11 @@ let make = () => {
apiKey
appId
indexName
+ navigator
+ hitComponent
onClose
initialScrollY={window.scrollY->Float.toInt}
- transformItems={transformItems}
- hitComponent=hit
+ searchParameters={distinct: 3, hitsPerPage: 20, attributesToSnippet: ["content:9999"]}
/>,
element,
)
diff --git a/styles/_docsearch.css b/styles/_docsearch.css
index ac8c167b7..ba5f02688 100644
--- a/styles/_docsearch.css
+++ b/styles/_docsearch.css
@@ -137,17 +137,17 @@
@apply hidden;
}
+.DocSearch-Clear {
+ @apply hidden;
+}
+
.DocSearch-LoadingIndicator svg,
.DocSearch-MagnifierLabel svg {
@apply w-4 h-4;
}
.DocSearch-Cancel {
- font-size: 0;
- background-image: url("data:image/svg+xml,%3Csvg width='16' height='7' fill='none' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath d='M.506 6h3.931V4.986H1.736v-1.39h2.488V2.583H1.736V1.196h2.69V.182H.506V6ZM8.56 1.855h1.18C9.721.818 8.87.102 7.574.102c-1.276 0-2.21.705-2.205 1.762-.003.858.602 1.35 1.585 1.585l.634.159c.633.153.986.335.988.727-.002.426-.406.716-1.03.716-.64 0-1.1-.295-1.14-.878h-1.19c.03 1.259.931 1.91 2.343 1.91 1.42 0 2.256-.68 2.259-1.745-.003-.969-.733-1.483-1.744-1.71l-.523-.125c-.506-.117-.93-.304-.92-.722 0-.375.332-.65.934-.65.588 0 .949.267.994.724ZM15.78 2.219C15.618.875 14.6.102 13.254.102c-1.537 0-2.71 1.086-2.71 2.989 0 1.898 1.153 2.989 2.71 2.989 1.492 0 2.392-.992 2.526-2.063l-1.244-.006c-.117.623-.606.98-1.262.98-.883 0-1.483-.656-1.483-1.9 0-1.21.591-1.9 1.492-1.9.673 0 1.159.389 1.253 1.028h1.244Z' fill='%2394a3b8'/%3E%3C/svg%3E") !important;
- background-size: 57.1428571429% auto;
- @apply w-9 h-7 bg-no-repeat bg-center appearance-none border border-gray-20
- rounded;
+ display: none !important;
}
/* Modal Dropdown */
@@ -273,8 +273,16 @@ svg.DocSearch-Hit-Select-Icon {
@apply text-14 text-gray-60;
}
+.DocSearch-Hit-subtitle {
+ @apply text-12 text-gray-40;
+}
+
.DocSearch-Hit-path {
- @apply text-12;
+ @apply text-14 text-gray-60;
+}
+
+.DocSearch-Hit-path code {
+ @apply bg-gray-10 text-black rounded-sm px-1 py-0.5 text-12 font-mono;
}
.DocSearch-Hit[aria-selected="true"] .DocSearch-Hit-title,
@@ -319,12 +327,40 @@ svg.DocSearch-Hit-Select-Icon {
/* Modal Footer */
.DocSearch-Footer {
- @apply flex flex-row-reverse flex-shrink-0 justify-between relative
- select-none w-full z-100 p-4;
+ border-top: 1px solid;
+ @apply flex flex-shrink-0 items-center justify-between relative
+ select-none w-full z-100 px-4 py-3 border-gray-20;
}
.DocSearch-Commands {
- display: none !important;
+ @apply flex items-center gap-3 list-none m-0 p-0;
+}
+
+.DocSearch-Commands li {
+ @apply flex items-center gap-1.5 text-12 text-gray-40;
+}
+
+.DocSearch-Commands-Key {
+ @apply inline-flex items-center justify-center w-5 h-5 rounded
+ border border-gray-20 bg-gray-5 text-11 text-gray-60 font-medium;
+}
+
+/* Swap "to close" / "to clear" based on whether the input has a query.
+ :placeholder-shown is true when the input is empty, false when it has text. */
+.DocSearch-Commands li:last-child .DocSearch-Label {
+ font-size: 0;
+}
+
+.DocSearch-Commands li:last-child .DocSearch-Label::after {
+ content: "to close";
+ font-size: 0.75rem;
+}
+
+.DocSearch-Modal:has(.DocSearch-Input:not(:placeholder-shown))
+ .DocSearch-Commands
+ li:last-child
+ .DocSearch-Label::after {
+ content: "to clear";
}
/* Responsive */
diff --git a/yarn.lock b/yarn.lock
index 5fcc3b57d..c7f896e07 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -5,6 +5,18 @@ __metadata:
version: 8
cacheKey: 10c0
+"@algolia/abtesting@npm:1.16.1":
+ version: 1.16.1
+ resolution: "@algolia/abtesting@npm:1.16.1"
+ dependencies:
+ "@algolia/client-common": "npm:5.50.1"
+ "@algolia/requester-browser-xhr": "npm:5.50.1"
+ "@algolia/requester-fetch": "npm:5.50.1"
+ "@algolia/requester-node-http": "npm:5.50.1"
+ checksum: 10c0/0ca113338a447693b4827bdf87f37490ccd81bc1bbbe39b02c338ff79582379a68853c3d35fb2297fd5636fa43818dac9e04b59965a8b47851e8b1da041b45e8
+ languageName: node
+ linkType: hard
+
"@algolia/autocomplete-core@npm:1.19.2":
version: 1.19.2
resolution: "@algolia/autocomplete-core@npm:1.19.2"
@@ -36,6 +48,148 @@ __metadata:
languageName: node
linkType: hard
+"@algolia/client-abtesting@npm:5.50.1":
+ version: 5.50.1
+ resolution: "@algolia/client-abtesting@npm:5.50.1"
+ dependencies:
+ "@algolia/client-common": "npm:5.50.1"
+ "@algolia/requester-browser-xhr": "npm:5.50.1"
+ "@algolia/requester-fetch": "npm:5.50.1"
+ "@algolia/requester-node-http": "npm:5.50.1"
+ checksum: 10c0/a3fb097e72acc5f1b009694774c0b23e1a7701ec4f54bbf4b20114f9adc73565f8d8c7fba492d769b6f5becd1ef4bf6b92073fb289cd06bfb3e12b2f0989f9ae
+ languageName: node
+ linkType: hard
+
+"@algolia/client-analytics@npm:5.50.1":
+ version: 5.50.1
+ resolution: "@algolia/client-analytics@npm:5.50.1"
+ dependencies:
+ "@algolia/client-common": "npm:5.50.1"
+ "@algolia/requester-browser-xhr": "npm:5.50.1"
+ "@algolia/requester-fetch": "npm:5.50.1"
+ "@algolia/requester-node-http": "npm:5.50.1"
+ checksum: 10c0/ade9f7ee8e8872f0c54149a9292fc32bad9e0b189068ca283f7110ce3f638b14c5078ce43d2c00c2bf752d3aa96e6bea63e4f1184cbe5bc36501074d96595d05
+ languageName: node
+ linkType: hard
+
+"@algolia/client-common@npm:5.50.1":
+ version: 5.50.1
+ resolution: "@algolia/client-common@npm:5.50.1"
+ checksum: 10c0/4750773473748fec73a7a9be3081274e21f2c4ccac463618b2ec470113c44c1f6961a991382c999acf04bd83e074547cd57c6304c4218d31bb0089b5c1099bf3
+ languageName: node
+ linkType: hard
+
+"@algolia/client-insights@npm:5.50.1":
+ version: 5.50.1
+ resolution: "@algolia/client-insights@npm:5.50.1"
+ dependencies:
+ "@algolia/client-common": "npm:5.50.1"
+ "@algolia/requester-browser-xhr": "npm:5.50.1"
+ "@algolia/requester-fetch": "npm:5.50.1"
+ "@algolia/requester-node-http": "npm:5.50.1"
+ checksum: 10c0/62ca243328f38e9a245e2860c12d1e76529e9bf68d5a30881a053adf5cbaddda27af631edd33e23d879a9e5445c66e2654f0149695cd1b75b09b42ea57ef575f
+ languageName: node
+ linkType: hard
+
+"@algolia/client-personalization@npm:5.50.1":
+ version: 5.50.1
+ resolution: "@algolia/client-personalization@npm:5.50.1"
+ dependencies:
+ "@algolia/client-common": "npm:5.50.1"
+ "@algolia/requester-browser-xhr": "npm:5.50.1"
+ "@algolia/requester-fetch": "npm:5.50.1"
+ "@algolia/requester-node-http": "npm:5.50.1"
+ checksum: 10c0/cbc099bd7a5f8ccefd4135a59dfa2b6136b751ed35d451a0c89738c8ad404195348d5553630ab8e59f056f17b8a284e918151696050b740d96e304c8f40174fd
+ languageName: node
+ linkType: hard
+
+"@algolia/client-query-suggestions@npm:5.50.1":
+ version: 5.50.1
+ resolution: "@algolia/client-query-suggestions@npm:5.50.1"
+ dependencies:
+ "@algolia/client-common": "npm:5.50.1"
+ "@algolia/requester-browser-xhr": "npm:5.50.1"
+ "@algolia/requester-fetch": "npm:5.50.1"
+ "@algolia/requester-node-http": "npm:5.50.1"
+ checksum: 10c0/345e0ecaf587aec2a956c2039da817fd26e203c8689fe8e0d428baf6ab03f0809a936099ae420e779d3ec252bbcaf3061c6e8670c660d7a9d66e98627d8938df
+ languageName: node
+ linkType: hard
+
+"@algolia/client-search@npm:5.50.1":
+ version: 5.50.1
+ resolution: "@algolia/client-search@npm:5.50.1"
+ dependencies:
+ "@algolia/client-common": "npm:5.50.1"
+ "@algolia/requester-browser-xhr": "npm:5.50.1"
+ "@algolia/requester-fetch": "npm:5.50.1"
+ "@algolia/requester-node-http": "npm:5.50.1"
+ checksum: 10c0/7910c074aa7b4fbbad2af082a7623d7d65ba0c19e0933d4658e43d588cd87ed2e851aad0c5428ce2a00a3e3248349fcda20ed5abb7700b93d03a475e2ce7a378
+ languageName: node
+ linkType: hard
+
+"@algolia/ingestion@npm:1.50.1":
+ version: 1.50.1
+ resolution: "@algolia/ingestion@npm:1.50.1"
+ dependencies:
+ "@algolia/client-common": "npm:5.50.1"
+ "@algolia/requester-browser-xhr": "npm:5.50.1"
+ "@algolia/requester-fetch": "npm:5.50.1"
+ "@algolia/requester-node-http": "npm:5.50.1"
+ checksum: 10c0/0d5264db46783d648246406349fe88dbc6fa1cdd74ed16500bb8a4e5efb1bdfd7174780065566fcb7317f7ba8ac858677ffb0d5194a1315c0ce6003bd4219d87
+ languageName: node
+ linkType: hard
+
+"@algolia/monitoring@npm:1.50.1":
+ version: 1.50.1
+ resolution: "@algolia/monitoring@npm:1.50.1"
+ dependencies:
+ "@algolia/client-common": "npm:5.50.1"
+ "@algolia/requester-browser-xhr": "npm:5.50.1"
+ "@algolia/requester-fetch": "npm:5.50.1"
+ "@algolia/requester-node-http": "npm:5.50.1"
+ checksum: 10c0/378076310011c77c91378a597d86d791d4821d1d00e3c500ec8828e72b9036bb974abb09bd0c10aa05fc75a50aa443be26985104ca78524a0a0cf34707536c70
+ languageName: node
+ linkType: hard
+
+"@algolia/recommend@npm:5.50.1":
+ version: 5.50.1
+ resolution: "@algolia/recommend@npm:5.50.1"
+ dependencies:
+ "@algolia/client-common": "npm:5.50.1"
+ "@algolia/requester-browser-xhr": "npm:5.50.1"
+ "@algolia/requester-fetch": "npm:5.50.1"
+ "@algolia/requester-node-http": "npm:5.50.1"
+ checksum: 10c0/0cf061bf2fc46240d93c6fe032693e143a5eb61a3fc27f619141ebea735b7e7f6c5c38b31b152e9ef074b61373549a1f72a76399d80ed55840251cc71438f829
+ languageName: node
+ linkType: hard
+
+"@algolia/requester-browser-xhr@npm:5.50.1":
+ version: 5.50.1
+ resolution: "@algolia/requester-browser-xhr@npm:5.50.1"
+ dependencies:
+ "@algolia/client-common": "npm:5.50.1"
+ checksum: 10c0/aa55122f483a0d1572da20b71b0b533493960894460ad545a6a50e1c73780affd4764d68aa5a1687894d23c31a972cc92886a0d8ed3324b6f5457efd58b424af
+ languageName: node
+ linkType: hard
+
+"@algolia/requester-fetch@npm:5.50.1":
+ version: 5.50.1
+ resolution: "@algolia/requester-fetch@npm:5.50.1"
+ dependencies:
+ "@algolia/client-common": "npm:5.50.1"
+ checksum: 10c0/07232c12ff0a5b25e5e6dfeeed8e46765f347926f263774e9ae061e65bd1ddce029f78fd5feaa34e23c80e80b0a84874d8799f817368e924cc904aef4f8f8181
+ languageName: node
+ linkType: hard
+
+"@algolia/requester-node-http@npm:5.50.1":
+ version: 5.50.1
+ resolution: "@algolia/requester-node-http@npm:5.50.1"
+ dependencies:
+ "@algolia/client-common": "npm:5.50.1"
+ checksum: 10c0/51be1452a28d4aeb97306121d164a3161fb55b775189df631f968bc752e00538a9872d0e0a2ad97744f8ca87c39f8352b526b8b290805ddaf5a2d4f43ae3360f
+ languageName: node
+ linkType: hard
+
"@asamuzakjp/css-color@npm:^3.2.0":
version: 3.2.0
resolution: "@asamuzakjp/css-color@npm:3.2.0"
@@ -3312,6 +3466,28 @@ __metadata:
languageName: node
linkType: hard
+"algoliasearch@npm:^5.50.1":
+ version: 5.50.1
+ resolution: "algoliasearch@npm:5.50.1"
+ dependencies:
+ "@algolia/abtesting": "npm:1.16.1"
+ "@algolia/client-abtesting": "npm:5.50.1"
+ "@algolia/client-analytics": "npm:5.50.1"
+ "@algolia/client-common": "npm:5.50.1"
+ "@algolia/client-insights": "npm:5.50.1"
+ "@algolia/client-personalization": "npm:5.50.1"
+ "@algolia/client-query-suggestions": "npm:5.50.1"
+ "@algolia/client-search": "npm:5.50.1"
+ "@algolia/ingestion": "npm:1.50.1"
+ "@algolia/monitoring": "npm:1.50.1"
+ "@algolia/recommend": "npm:5.50.1"
+ "@algolia/requester-browser-xhr": "npm:5.50.1"
+ "@algolia/requester-fetch": "npm:5.50.1"
+ "@algolia/requester-node-http": "npm:5.50.1"
+ checksum: 10c0/4b91f019c89324786e23f90b7773eb82b142e8075c95f204cf6fc07f320fcbbf623ca338509647d93b9776f4645a1f72debb2800627c4bf1b80e3ed8f2b398b1
+ languageName: node
+ linkType: hard
+
"ansi-align@npm:^3.0.1":
version: 3.0.1
resolution: "ansi-align@npm:3.0.1"
@@ -6678,9 +6854,9 @@ __metadata:
linkType: hard
"lodash@npm:^4.17.10, lodash@npm:^4.17.21":
- version: 4.18.1
- resolution: "lodash@npm:4.18.1"
- checksum: 10c0/757228fc68805c59789e82185135cf85f05d0b2d3d54631d680ca79ec21944ec8314d4533639a14b8bcfbd97a517e78960933041a5af17ecb693ec6eecb99a27
+ version: 4.17.23
+ resolution: "lodash@npm:4.17.23"
+ checksum: 10c0/1264a90469f5bb95d4739c43eb6277d15b6d9e186df4ac68c3620443160fc669e2f14c11e7d8b2ccf078b81d06147c01a8ccced9aab9f9f63d50dcf8cace6bf6
languageName: node
linkType: hard
@@ -9046,6 +9222,7 @@ __metadata:
"@types/react": "npm:^19.2.14"
"@vitejs/plugin-react": "npm:^6.0.1"
"@vitest/browser-playwright": "npm:^4.1.2"
+ algoliasearch: "npm:^5.50.1"
auto-image-converter: "npm:^2.2.0"
chokidar: "npm:^4.0.3"
docson: "npm:^2.1.0"