@@ -5,6 +5,7 @@ import { qs, escapeHtmlEntities, isBlank, getQueryParamByName, getProjectNameAnd
 import { setSearchInputValue } from './search-bar'
 import searchResultsTemplate from './handlebars/templates/search-results.handlebars'
 import { getSearchNodes } from './globals'
+import { highlightMatches } from './highlighter'

 const EXCERPT_RADIUS = 80
 const SEARCH_CONTAINER_SELECTOR = '#search'
@@ -23,7 +24,7 @@ lunr.Pipeline.registerFunction(docTrimmerFunction, 'docTrimmer')

 window.addEventListener('exdoc:loaded', initialize)

-function initialize () {
+function initialize() {
   const pathname = window.location.pathname
   if (pathname.endsWith('/search.html') || pathname.endsWith('/search')) {
     const query = getQueryParamByName('q')
@@ -32,7 +33,7 @@ function initialize () {
   }
 }

-async function search (value, queryType) {
+async function search(value, queryType) {
   if (isBlank(value)) {
     renderResults({ value })
   } else {
@@ -55,7 +56,7 @@ async function search (value, queryType) {
   }
 }

-async function localSearch (value) {
+async function localSearch(value) {
   const index = await getIndex()

   // We cannot match on atoms :foo because that would be considered
@@ -64,7 +65,7 @@ async function localSearch (value) {
   return searchResultsToDecoratedSearchItems(index.search(fixedValue))
 }

-async function remoteSearch (value, queryType, searchNodes) {
+async function remoteSearch(value, queryType, searchNodes) {
   let filterNodes = searchNodes

   if (queryType === 'latest') {
@@ -85,7 +86,7 @@ async function remoteSearch (value, queryType, searchNodes) {
     return payload.hits.map(result => {
       const [packageName, packageVersion] = result.document.package.split('-')

-      const doc = result.document.doc
+      const doc = highlightMatches(result.document.doc, value, { multiline: true })
       const excerpts = [doc]
       const metadata = {}
       const ref = `https://hexdocs.pm/${packageName}/${packageVersion}/${result.document.ref}`
@@ -106,13 +107,13 @@ async function remoteSearch (value, queryType, searchNodes) {
   }
 }

-function renderResults ({ value, results, errorMessage }) {
+function renderResults({ value, results, errorMessage }) {
   const searchContainer = qs(SEARCH_CONTAINER_SELECTOR)
   const resultsHtml = searchResultsTemplate({ value, results, errorMessage })
   searchContainer.innerHTML = resultsHtml
 }

-async function getIndex () {
+async function getIndex() {
   const cachedIndex = await loadIndex()
   if (cachedIndex) { return cachedIndex }

@@ -121,7 +122,7 @@ async function getIndex () {
   return index
 }

-async function loadIndex () {
+async function loadIndex() {
   try {
     const serializedIndex = sessionStorage.getItem(indexStorageKey())
     if (serializedIndex) {
@@ -136,7 +137,7 @@ async function loadIndex () {
   }
 }

-async function saveIndex (index) {
+async function saveIndex(index) {
   try {
     const serializedIndex = await compress(index)
     sessionStorage.setItem(indexStorageKey(), serializedIndex)
@@ -145,7 +146,7 @@ async function saveIndex (index) {
   }
 }

-async function compress (index) {
+async function compress(index) {
   const stream = new Blob([JSON.stringify(index)], {
     type: 'application/json'
   }).stream().pipeThrough(new window.CompressionStream('gzip'))
@@ -155,7 +156,7 @@ async function compress (index) {
   return b64encode(buffer)
 }

-async function decompress (index) {
+async function decompress(index) {
   const stream = new Blob([b64decode(index)], {
     type: 'application/json'
   }).stream().pipeThrough(new window.DecompressionStream('gzip'))
@@ -164,7 +165,7 @@ async function decompress (index) {
   return JSON.parse(blob)
 }

-function b64encode (buffer) {
+function b64encode(buffer) {
   let binary = ''
   const bytes = new Uint8Array(buffer)
   const len = bytes.byteLength
@@ -174,7 +175,7 @@ function b64encode (buffer) {
   return window.btoa(binary)
 }

-function b64decode (str) {
+function b64decode(str) {
   const binaryString = window.atob(str)
   const len = binaryString.length
   const bytes = new Uint8Array(new ArrayBuffer(len))
@@ -184,11 +185,11 @@ function b64decode (str) {
   return bytes
 }

-function indexStorageKey () {
+function indexStorageKey() {
   return `idv5:${getProjectNameAndVersion()}`
 }

-function createIndex () {
+function createIndex() {
   return lunr(function () {
     this.ref('ref')
     this.field('title', { boost: 3 })
@@ -206,11 +207,11 @@ function createIndex () {
   })
 }

-function docTokenSplitter (builder) {
+function docTokenSplitter(builder) {
   builder.pipeline.before(lunr.stemmer, docTokenFunction)
 }

-function docTokenFunction (token) {
+function docTokenFunction(token) {
   // If we have something with an arity, we split on : . to make partial
   // matches easier. We split only when tokenizing, not when searching.
   // Below we use ExDoc.Markdown.to_ast/2 as an example.
@@ -274,11 +275,11 @@ function docTokenFunction (token) {
   return tokens
 }

-function docTrimmer (builder) {
+function docTrimmer(builder) {
   builder.pipeline.before(lunr.stemmer, docTrimmerFunction)
 }

-function docTrimmerFunction (token) {
+function docTrimmerFunction(token) {
   // Preserve @ and : at the beginning of tokens,
   // and ? and ! at the end of tokens. It needs to
   // be done before stemming, otherwise search and
@@ -288,7 +289,7 @@ function docTrimmerFunction (token) {
   })
 }

-function searchResultsToDecoratedSearchItems (results) {
+function searchResultsToDecoratedSearchItems(results) {
   return results
     // If the docs are regenerated without changing its version,
     // a reference may have been doc'ed false in the code but
@@ -305,11 +306,11 @@ function searchResultsToDecoratedSearchItems (results) {
     })
 }

-function getSearchItemByRef (ref) {
+function getSearchItemByRef(ref) {
   return searchData.items.find(searchItem => searchItem.ref === ref) || null
 }

-function getExcerpts (searchItem, metadata) {
+function getExcerpts(searchItem, metadata) {
   const { doc } = searchItem
   const searchTerms = Object.keys(metadata)

@@ -330,7 +331,7 @@ function getExcerpts (searchItem, metadata) {
   return excerpts.slice(0, 1)
 }

-function excerpt (doc, sliceStart, sliceLength) {
+function excerpt(doc, sliceStart, sliceLength) {
   const startPos = Math.max(sliceStart - EXCERPT_RADIUS, 0)
   const endPos = Math.min(sliceStart + sliceLength + EXCERPT_RADIUS, doc.length)
   return [
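
The new `./highlighter` module is not part of this diff, so only its call shape is visible: `highlightMatches(result.document.doc, value, { multiline: true })` takes the excerpt text plus the query string and returns text with the matched terms marked up for display. Below is a minimal sketch of a function with that signature, assuming matches are wrapped in <mark> tags; the actual module may differ.

// Hypothetical sketch only: assets/js/highlighter.js is not shown in this diff and the
// real implementation may differ. Assumes matched terms are wrapped in <mark> tags.
export function highlightMatches(text, query, options = {}) {
  const terms = query.trim().split(/\s+/).filter(term => term !== '')
  if (terms.length === 0) return text

  // Escape regex metacharacters so the query cannot break the pattern.
  const escaped = terms.map(term => term.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'))
  // options.multiline is accepted for parity with the call site above; this simplified
  // sketch matches term by term, so line breaks in the text need no special handling.
  return text.replace(new RegExp(`(${escaped.join('|')})`, 'gi'), '<mark>$1</mark>')
}

With a helper of that shape, the changed line in remoteSearch swaps the raw result.document.doc text for its highlighted version before it is placed into excerpts.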