9-2152530669b4465e711f.js.map
1 lines (1 loc) · 92.9 KB
{"version":3,"sources":["webpack:///./node_modules/elasticlunr/elasticlunr.js"],"names":["__WEBPACK_AMD_DEFINE_FACTORY__","__WEBPACK_AMD_DEFINE_RESULT__","global","step2list","step3list","v","C","re_mgr0","re_mgr1","re_meq1","re_s_v","re_1a","re2_1a","re_1b","re2_1b","re_1b_2","re2_1b_2","re3_1b_2","re4_1b_2","re_1c","re_2","re_3","re_4","re2_4","re_5","re_5_1","re3_5","elasticlunr","config","idx","Index","pipeline","add","trimmer","stopWordFilter","stemmer","call","version","lunr","utils","warn","this","message","console","toString","obj","EventEmitter","events","prototype","addListener","args","Array","slice","arguments","fn","pop","names","TypeError","forEach","name","hasHandler","push","removeListener","fnIndex","indexOf","splice","length","emit","apply","undefined","tokenizer","str","isArray","arr","filter","token","map","t","toLowerCase","out","item","tokens","split","seperator","concat","trim","defaultSeperator","setSeperator","sep","resetSeperator","getSeperator","Pipeline","_queue","registeredFunctions","registerFunction","label","getRegisteredFunction","warnIfFunctionNotRegistered","load","serialised","fnName","Error","after","existingFn","newFn","pos","before","remove","run","tokenLength","pipelineLength","i","j","reset","get","toJSON","_fields","_ref","documentStore","DocumentStore","index","eventEmitter","_idfCache","on","bind","off","serialisedData","field","fields","ref","InvertedIndex","addField","fieldName","setRef","refName","saveDocument","save","addDoc","doc","emitEvent","docRef","fieldTokens","addFieldLength","tokenCount","termFrequency","Math","sqrt","addToken","tf","removeDocByRef","isDocStored","hasDoc","getDoc","removeDoc","removeToken","updateDoc","idf","term","cacheKey","Object","hasOwnProperty","df","getDocFreq","log","getFields","search","query","userConfig","configStr","JSON","stringify","Configuration","queryTokens","queryResults","fieldSearchResults","fieldSearch","fieldBoost","boost","results","score","sort","a","b","booleanType","bool","expand","scores","docTokens","expandToken","queryTokenScores","key","docs","getDocs","filteredDocs","fieldSearchStats","getTermFrequency","fieldLength","getFieldLength","fieldLengthNorm","penality","mergeScores","coordNorm","accumScores","op","intersection","n","indexJson","use","plugin","unshift","_save","docInfo","store","copy","constructor","attr","clone","updateFieldLength","ational","tional","enci","anci","izer","bli","alli","entli","eli","ousli","ization","ation","ator","alism","iveness","fulness","ousness","aliti","iviti","biliti","logi","icate","ative","alize","iciti","ical","ful","ness","c","RegExp","w","stem","suffix","firstch","re","re2","re3","re4","substr","toUpperCase","test","replace","fp","exec","stopWords","clearStopWords","addStopWords","words","word","resetStopWords","defaultStopWords","","able","about","across","all","almost","also","am","among","an","and","any","are","as","at","be","because","been","but","by","can","cannot","could","dear","did","do","does","either","else","ever","every","for","from","got","had","has","have","he","her","hers","him","his","how","however","if","in","into","is","it","its","just","least","let","like","likely","may","me","might","most","must","my","neither","no","nor","not","of","often","only","or","other","our","own","rather","said","say","says","she","should","since","so","some","than","that","the","their","them","then","there","these","they","tis","to","too","twas","us","wants","was","we","were","what","when","where","which","while","who","whom","why","will","with","would",
"yet","you","your","root","tokenInfo","hasToken","node","getNode","memo","parse","buildUserConfig","error","buildDefaultConfig","global_bool","global_expand","field_config","field_expand","addAllFields2UserConfig","SortedSet","elements","set","element","locationFor","toArray","ctx","elem","start","end","sectionLength","pivot","floor","pivotElem","intersect","otherSet","intersectSet","a_len","b_len","union","longSet","shortSet","unionSet","shortSetElements","exports","__webpack_require__","module"],"mappings":"6EAAA,IAAAA,EAAAC,GAUA,WA6DA,IAqCAC,EA6uCAC,EAwBAC,EAWAC,EACAC,EAQAC,EACAC,EACAC,EACAC,EAEAC,EACAC,EACAC,EACAC,EACAC,EACAC,EACAC,EACAC,EAEAC,EACAC,EAEAC,EAEAC,EACAC,EAEAC,EACAC,EACAC,EAt1CAC,EAAA,SAAAC,GACA,IAAAC,EAAA,IAAAF,EAAAG,MAUA,OARAD,EAAAE,SAAAC,IACAL,EAAAM,QACAN,EAAAO,eACAP,EAAAQ,SAGAP,KAAAQ,KAAAP,KAEAA,GAGAF,EAAAU,QAAA,QAIAC,KAAAX,EAWAA,EAAAY,MAAA,GAQAZ,EAAAY,MAAAC,MAAAtC,EAMCuC,KALD,SAAAC,GACAxC,EAAAyC,iBAAAH,MACAG,QAAAH,KAAAE,KAgBAf,EAAAY,MAAAK,SAAA,SAAAC,GACA,OAAAA,QACA,GAGAA,EAAAD,YAiBAjB,EAAAmB,aAAA,WACAL,KAAAM,OAAA,IAYApB,EAAAmB,aAAAE,UAAAC,YAAA,WACA,IAAAC,EAAAC,MAAAH,UAAAI,MAAAhB,KAAAiB,WACAC,EAAAJ,EAAAK,MACAC,EAAAN,EAEA,sBAAAI,EAAA,UAAAG,UAAA,oCAEAD,EAAAE,QAAA,SAAAC,GACAlB,KAAAmB,WAAAD,KAAAlB,KAAAM,OAAAY,GAAA,IACAlB,KAAAM,OAAAY,GAAAE,KAAAP,IACGb,OAUHd,EAAAmB,aAAAE,UAAAc,eAAA,SAAAH,EAAAL,GACA,GAAAb,KAAAmB,WAAAD,GAAA,CAEA,IAAAI,EAAAtB,KAAAM,OAAAY,GAAAK,QAAAV,IACA,IAAAS,IAEAtB,KAAAM,OAAAY,GAAAM,OAAAF,EAAA,GAEA,GAAAtB,KAAAM,OAAAY,GAAAO,eAAAzB,KAAAM,OAAAY,MAYAhC,EAAAmB,aAAAE,UAAAmB,KAAA,SAAAR,GACA,GAAAlB,KAAAmB,WAAAD,GAAA,CAEA,IAAAT,EAAAC,MAAAH,UAAAI,MAAAhB,KAAAiB,UAAA,GAEAZ,KAAAM,OAAAY,GAAAD,QAAA,SAAAJ,GACAA,EAAAc,WAAAC,EAAAnB,IACGT,QAUHd,EAAAmB,aAAAE,UAAAY,WAAA,SAAAD,GACA,OAAAA,KAAAlB,KAAAM,QAqBApB,EAAA2C,UAAA,SAAAC,GACA,IAAAlB,UAAAa,QAAA,MAAAK,EAAA,SACA,GAAApB,MAAAqB,QAAAD,GAAA,CACA,IAAAE,EAAAF,EAAAG,OAAA,SAAAC,GACA,OAAAA,UAOAF,IAAAG,IAAA,SAAAC,GACA,OAAAlD,EAAAY,MAAAK,SAAAiC,GAAAC,gBAGA,IAAAC,EAAA,GAMA,OALAN,EAAAf,QAAA,SAAAsB,GACA,IAAAC,EAAAD,EAAAE,MAAAvD,EAAA2C,UAAAa,WACAJ,IAAAK,OAAAH,IACKxC,MAELsC,EAGA,OAAAR,EAAA3B,WAAAyC,OAAAP,cAAAI,MAAAvD,EAAA2C,UAAAa,YAMAxD,EAAA2C,UAAAgB,iBAAA,UASA3D,EAAA2C,UAAAa,UAAAxD,EAAA2C,UAAAgB,iBAOA3D,EAAA2C,UAAAiB,aAAA,SAAAC,GACAA,SAAA,qBACA7D,EAAA2C,UAAAa,UAAAK,IAQA7D,EAAA2C,UAAAmB,eAAA,WACA9D,EAAA2C,UAAAa,UAAAxD,EAAA2C,UAAAgB,kBAOA3D,EAAA2C,UAAAoB,aAAA,WACA,OAAA/D,EAAA2C,UAAAa,WAkCAxD,EAAAgE,SAAA,WACAlD,KAAAmD,OAAA,IAGAjE,EAAAgE,SAAAE,oBAAA,GAeAlE,EAAAgE,SAAAG,iBAAA,SAAAxC,EAAAyC,GACAA,KAAApE,EAAAgE,SAAAE,qBACAlE,EAAAY,MAAAC,KAAA,6CAAAuD,GAGAzC,EAAAyC,QACApE,EAAAgE,SAAAE,oBAAAE,GAAAzC,GAUA3B,EAAAgE,SAAAK,sBAAA,SAAAD,GACA,OAAAA,KAAApE,EAAAgE,SAAAE,sBAAA,EACA,KAGAlE,EAAAgE,SAAAE,oBAAAE,IAUApE,EAAAgE,SAAAM,4BAAA,SAAA3C,GACAA,EAAAyC,OAAAzC,EAAAyC,SAAAtD,KAAAoD,qBAGAlE,EAAAY,MAAAC,KAAA,kGAAAc,IAeA3B,EAAAgE,SAAAO,KAAA,SAAAC,GACA,IAAApE,EAAA,IAAAJ,EAAAgE,SAYA,OAVAQ,EAAAzC,QAAA,SAAA0C,GACA,IAAA9C,EAAA3B,EAAAgE,SAAAK,sBAAAI,GAEA,IAAA9C,EAGA,UAAA+C,MAAA,uCAAAD,GAFArE,EAAAC,IAAAsB,KAMAvB,GAWAJ,EAAAgE,SAAA3C,UAAAhB,IAAA,WACAmB,MAAAH,UAAAI,MAAAhB,KAAAiB,WAEAK,QAAA,SAAAJ,GACA3B,EAAAgE,SAAAM,4BAAA3C,GACAb,KAAAmD,OAAA/B,KAAAP,IACGb,OAcHd,EAAAgE,SAAA3C,UAAAsD,MAAA,SAAAC,EAAAC,GACA7E,EAAAgE,SAAAM,4BAAAO,GAEA,IAAAC,EAAAhE,KAAAmD,OAAA5B,QAAAuC,GACA,QAAAE,EACA,UAAAJ,MAAA,0BAGA5D,KAAAmD,OAAA3B,OAAAwC,EAAA,IAAAD,IAcA7E,EAAAgE,SAAA3C,UAAA0D,OAAA,SAAAH,EAAAC,GACA7E,EAAAgE,SAAAM,4BAAAO,GAEA,IAAAC,EAAAhE,KAAAmD,OAAA5B,QAAAuC,GACA,QAAAE,EACA,UAAAJ,MAAA,0BAGA5D,KAAAmD,OAAA3B,OAAAwC,EAAA,EAAAD,IASA7E,EAAAgE,SAAA3C,UAAA2D,OAAA,SAAArD,GACA,IAAAmD,EAAAhE,KAAAmD,OAAA5B,QAA
AV,IACA,IAAAmD,GAIAhE,KAAAmD,OAAA3B,OAAAwC,EAAA,IAWA9E,EAAAgE,SAAA3C,UAAA4D,IAAA,SAAA3B,GAKA,IAJA,IAAAF,EAAA,GACA8B,EAAA5B,EAAAf,OACA4C,EAAArE,KAAAmD,OAAA1B,OAEA6C,EAAA,EAAiBA,EAAAF,EAAiBE,IAAA,CAGlC,IAFA,IAAApC,EAAAM,EAAA8B,GAEAC,EAAA,EAAmBA,EAAAF,GAEnBnC,OADAA,EAAAlC,KAAAmD,OAAAoB,GAAArC,EAAAoC,EAAA9B,IADuC+B,KAKvCrC,SAAAI,EAAAlB,KAAAc,GAGA,OAAAI,GAQApD,EAAAgE,SAAA3C,UAAAiE,MAAA,WACAxE,KAAAmD,OAAA,IAQAjE,EAAAgE,SAAA3C,UAAAkE,IAAA,WACA,OAAAzE,KAAAmD,QAcAjE,EAAAgE,SAAA3C,UAAAmE,OAAA,WACA,OAAA1E,KAAAmD,OAAAhB,IAAA,SAAAtB,GAEA,OADA3B,EAAAgE,SAAAM,4BAAA3C,GACAA,EAAAyC,SAgBApE,EAAAG,MAAA,WACAW,KAAA2E,QAAA,GACA3E,KAAA4E,KAAA,KACA5E,KAAAV,SAAA,IAAAJ,EAAAgE,SACAlD,KAAA6E,cAAA,IAAA3F,EAAA4F,cACA9E,KAAA+E,MAAA,GACA/E,KAAAgF,aAAA,IAAA9F,EAAAmB,aACAL,KAAAiF,UAAA,GAEAjF,KAAAkF,GAAA,mCACAlF,KAAAiF,UAAA,IACGE,KAAAnF,QAYHd,EAAAG,MAAAkB,UAAA2E,GAAA,WACA,IAAAzE,EAAAC,MAAAH,UAAAI,MAAAhB,KAAAiB,WACA,OAAAZ,KAAAgF,aAAAxE,YAAAmB,MAAA3B,KAAAgF,aAAAvE,IAUAvB,EAAAG,MAAAkB,UAAA6E,IAAA,SAAAlE,EAAAL,GACA,OAAAb,KAAAgF,aAAA3D,eAAAH,EAAAL,IAaA3B,EAAAG,MAAAoE,KAAA,SAAA4B,GACAA,EAAAzF,UAAAV,EAAAU,SACAV,EAAAY,MAAAC,KAAA,6BACAb,EAAAU,QAAA,cAAAyF,EAAAzF,SAGA,IAAAR,EAAA,IAAAY,KAOA,QAAAsF,KALAlG,EAAAuF,QAAAU,EAAAE,OACAnG,EAAAwF,KAAAS,EAAAG,IACApG,EAAAyF,cAAA3F,EAAA4F,cAAArB,KAAA4B,EAAAR,eACAzF,EAAAE,SAAAJ,EAAAgE,SAAAO,KAAA4B,EAAA/F,UACAF,EAAA2F,MAAA,GACAM,EAAAN,MACA3F,EAAA2F,MAAAO,GAAApG,EAAAuG,cAAAhC,KAAA4B,EAAAN,MAAAO,IAGA,OAAAlG,GAgBAF,EAAAG,MAAAkB,UAAAmF,SAAA,SAAAC,GAGA,OAFA3F,KAAA2E,QAAAvD,KAAAuE,GACA3F,KAAA+E,MAAAY,GAAA,IAAAzG,EAAAuG,cACAzF,MAgBAd,EAAAG,MAAAkB,UAAAqF,OAAA,SAAAC,GAEA,OADA7F,KAAA4E,KAAAiB,EACA7F,MAaAd,EAAAG,MAAAkB,UAAAuF,aAAA,SAAAC,GAEA,OADA/F,KAAA6E,cAAA,IAAA3F,EAAA4F,cAAAiB,GACA/F,MAkBAd,EAAAG,MAAAkB,UAAAyF,OAAA,SAAAC,EAAAC,GACA,GAAAD,EAAA,CACAC,OAAAtE,IAAAsE,KAAA,IAEAC,EAAAF,EAAAjG,KAAA4E,MAEA5E,KAAA6E,cAAAmB,OAAAG,EAAAF,GACAjG,KAAA2E,QAAA1D,QAAA,SAAAqE,GACA,IAAAc,EAAApG,KAAAV,SAAA6E,IAAAjF,EAAA2C,UAAAoE,EAAAX,KACAtF,KAAA6E,cAAAwB,eAAAF,EAAAb,EAAAc,EAAA3E,QAEA,IAAA6E,EAAA,GAMA,QAAApE,KALAkE,EAAAnF,QAAA,SAAAiB,GACAA,KAAAoE,IAAApE,IAAA,EACAoE,EAAApE,GAAA,GACKlC,MAELsG,EAAA,CACA,IAAAC,EAAAD,EAAApE,GACAqE,EAAAC,KAAAC,KAAAF,GACAvG,KAAA+E,MAAAO,GAAAoB,SAAAxE,EAAA,CAAyCsD,IAAAW,EAAAQ,GAAAJ,MAEtCvG,MAEHkG,GAAAlG,KAAAgF,aAAAtD,KAAA,MAAAuE,EAAAjG,QAmBAd,EAAAG,MAAAkB,UAAAqG,eAAA,SAAAT,EAAAD,GACA,GAAAC,IACA,IAAAnG,KAAA6E,cAAAgC,eAIA7G,KAAA6E,cAAAiC,OAAAX,GAAA,CACA,IAAAF,EAAAjG,KAAA6E,cAAAkC,OAAAZ,GACAnG,KAAAgH,UAAAf,GAAA,KAmBA/G,EAAAG,MAAAkB,UAAAyG,UAAA,SAAAf,EAAAC,GACA,GAAAD,EAAA,CAEAC,OAAAtE,IAAAsE,KAAA,IAEAC,EAAAF,EAAAjG,KAAA4E,MACA5E,KAAA6E,cAAAiC,OAAAX,KAEAnG,KAAA6E,cAAAmC,UAAAb,GAEAnG,KAAA2E,QAAA1D,QAAA,SAAAqE,GACAtF,KAAAV,SAAA6E,IAAAjF,EAAA2C,UAAAoE,EAAAX,KACArE,QAAA,SAAAiB,GACAlC,KAAA+E,MAAAO,GAAA2B,YAAA/E,EAAAiE,IACKnG,OACFA,MAEHkG,GAAAlG,KAAAgF,aAAAtD,KAAA,SAAAuE,EAAAjG,SAuBAd,EAAAG,MAAAkB,UAAA2G,UAAA,SAAAjB,EAAAC,GACAA,OAAAtE,IAAAsE,KAEAlG,KAAA4G,eAAAX,EAAAjG,KAAA4E,OAAA,GACA5E,KAAAgG,OAAAC,GAAA,GAEAC,GAAAlG,KAAAgF,aAAAtD,KAAA,SAAAuE,EAAAjG,OAYAd,EAAAG,MAAAkB,UAAA4G,IAAA,SAAAC,EAAA9B,GACA,IAAA+B,EAAA,IAAA/B,EAAA,IAAA8B,EACA,GAAAE,OAAA/G,UAAAgH,eAAA5H,KAAAK,KAAAiF,UAAAoC,GAAA,OAAArH,KAAAiF,UAAAoC,GAEA,IAAAG,EAAAxH,KAAA+E,MAAAO,GAAAmC,WAAAL,GACAD,EAAA,EAAAX,KAAAkB,IAAA1H,KAAA6E,cAAApD,QAAA+F,EAAA,IAGA,OAFAxH,KAAAiF,UAAAoC,GAAAF,EAEAA,GAQAjI,EAAAG,MAAAkB,UAAAoH,UAAA,WACA,OAAA3H,KAAA2E,QAAAhE,SA4BAzB,EAAAG,MAAAkB,UAAAqH,OAAA,SAAAC,EAAAC,GACA,IAAAD,EAAA,SAEA,IAAAE,EAAA,KACA,MAAAD,IACAC,EAAAC,KAAAC,UAAAH,IAGA,IAAA3I,EAAA,IAAAD,EAAAgJ,cAAAH,EAAA/H,KAAA2H,aAAAlD,MAEA0D,EAAAnI,KAAAV,SAAA6E,IAAAjF,EAAA2C,UAAAgG,IAE
AO,EAAA,GAEA,QAAA9C,KAAAnG,EAAA,CACA,IAAAkJ,EAAArI,KAAAsI,YAAAH,EAAA7C,EAAAnG,GACAoJ,EAAApJ,EAAAmG,GAAAkD,MAEA,QAAArC,KAAAkC,EACAA,EAAAlC,GAAAkC,EAAAlC,GAAAoC,EAGA,QAAApC,KAAAkC,EACAlC,KAAAiC,EACAA,EAAAjC,IAAAkC,EAAAlC,GAEAiC,EAAAjC,GAAAkC,EAAAlC,GAKA,IAAAsC,EAAA,GACA,QAAAtC,KAAAiC,EACAK,EAAArH,KAAA,CAAkBoE,IAAAW,EAAAuC,MAAAN,EAAAjC,KAIlB,OADAsC,EAAAE,KAAA,SAAAC,EAAAC,GAAgC,OAAAA,EAAAH,MAAAE,EAAAF,QAChCD,GAWAvJ,EAAAG,MAAAkB,UAAA+H,YAAA,SAAAH,EAAAxC,EAAAxG,GACA,IAAA2J,EAAA3J,EAAAwG,GAAAoD,KACAC,EAAA7J,EAAAwG,GAAAqD,OACAR,EAAArJ,EAAAwG,GAAA6C,MACAS,EAAA,KACAC,EAAA,GAGA,OAAAV,EAmFA,OA/EAL,EAAAlH,QAAA,SAAAiB,GACA,IAAAM,EAAA,CAAAN,GACA,GAAA8G,IACAxG,EAAAxC,KAAA+E,MAAAY,GAAAwD,YAAAjH,IAoBA,IAAAkH,EAAA,GACA5G,EAAAvB,QAAA,SAAAoI,GACA,IAAAC,EAAAtJ,KAAA+E,MAAAY,GAAA4D,QAAAF,GACAlC,EAAAnH,KAAAmH,IAAAkC,EAAA1D,GAEA,GAAAsD,GAAA,OAAAH,EAAA,CAIA,IAAAU,EAAA,GACA,QAAArD,KAAA8C,EACA9C,KAAAmD,IACAE,EAAArD,GAAAmD,EAAAnD,IAGAmD,EAAAE,EAYA,QAAArD,KAJAkD,GAAAnH,GACAlC,KAAAyJ,iBAAAP,EAAAG,EAAAC,GAGAA,EAAA,CACA,IAAA3C,EAAA3G,KAAA+E,MAAAY,GAAA+D,iBAAAL,EAAAlD,GACAwD,EAAA3J,KAAA6E,cAAA+E,eAAAzD,EAAAR,GACAkE,EAAA,EACA,GAAAF,IACAE,EAAA,EAAArD,KAAAC,KAAAkD,IAGA,IAAAG,EAAA,EACAT,GAAAnH,IAGA4H,EAAA,QAAAT,EAAA5H,OAAAS,EAAAT,QAAA4H,EAAA5H,SAGA,IAAAiH,EAAA/B,EAAAQ,EAAA0C,EAAAC,EAEA3D,KAAAiD,EACAA,EAAAjD,IAAAuC,EAEAU,EAAAjD,GAAAuC,IAGK1I,MAELiJ,EAAAjJ,KAAA+J,YAAAd,EAAAG,EAAAN,IACG9I,MAEHiJ,EAAAjJ,KAAAgK,UAAAf,EAAAC,EAAAf,EAAA1G,SAgBAvC,EAAAG,MAAAkB,UAAAwJ,YAAA,SAAAE,EAAAhB,EAAAiB,GACA,IAAAD,EACA,OAAAhB,EAEA,UAAAiB,EAAA,CACA,IAAAC,EAAA,GACA,QAAAhE,KAAA8C,EACA9C,KAAA8D,IACAE,EAAAhE,GAAA8D,EAAA9D,GAAA8C,EAAA9C,IAGA,OAAAgE,EAEA,QAAAhE,KAAA8C,EACA9C,KAAA8D,EACAA,EAAA9D,IAAA8C,EAAA9C,GAEA8D,EAAA9D,GAAA8C,EAAA9C,GAGA,OAAA8D,GAcA/K,EAAAG,MAAAkB,UAAAkJ,iBAAA,SAAAP,EAAAhH,EAAAoH,GACA,QAAArD,KAAAqD,EACArD,KAAAiD,EACAA,EAAAjD,GAAA7E,KAAAc,GAEAgH,EAAAjD,GAAA,CAAA/D,IAiBAhD,EAAAG,MAAAkB,UAAAyJ,UAAA,SAAAf,EAAAC,EAAAkB,GACA,QAAAnE,KAAAgD,EACA,GAAAhD,KAAAiD,EAAA,CACA,IAAA1G,EAAA0G,EAAAjD,GAAAxE,OACAwH,EAAAhD,GAAAgD,EAAAhD,GAAAzD,EAAA4H,EAGA,OAAAnB,GASA/J,EAAAG,MAAAkB,UAAAmE,OAAA,WACA,IAAA2F,EAAA,GAKA,OAJArK,KAAA2E,QAAA1D,QAAA,SAAAqE,GACA+E,EAAA/E,GAAAtF,KAAA+E,MAAAO,GAAAZ,UACG1E,MAEH,CACAJ,QAAAV,EAAAU,QACA2F,OAAAvF,KAAA2E,QACAa,IAAAxF,KAAA4E,KACAC,cAAA7E,KAAA6E,cAAAH,SACAK,MAAAsF,EACA/K,SAAAU,KAAAV,SAAAoF,WA8BAxF,EAAAG,MAAAkB,UAAA+J,IAAA,SAAAC,GACA,IAAA9J,EAAAC,MAAAH,UAAAI,MAAAhB,KAAAiB,UAAA,GACAH,EAAA+J,QAAAxK,MACAuK,EAAA5I,MAAA3B,KAAAS,IAqBAvB,EAAA4F,cAAA,SAAAiB,GAEA/F,KAAAyK,MADA1E,SAGAA,EAGA/F,KAAAsJ,KAAA,GACAtJ,KAAA0K,QAAA,GACA1K,KAAAyB,OAAA,GASAvC,EAAA4F,cAAArB,KAAA,SAAA4B,GACA,IAAAsF,EAAA,IAAA3K,KAOA,OALA2K,EAAAlJ,OAAA4D,EAAA5D,OACAkJ,EAAArB,KAAAjE,EAAAiE,KACAqB,EAAAD,QAAArF,EAAAqF,QACAC,EAAAF,MAAApF,EAAAU,KAEA4E,GAQAzL,EAAA4F,cAAAvE,UAAAsG,YAAA,WACA,OAAA7G,KAAAyK,OAYAvL,EAAA4F,cAAAvE,UAAAyF,OAAA,SAAAG,EAAAF,GACAjG,KAAA8G,OAAAX,IAAAnG,KAAAyB,UAEA,IAAAzB,KAAAyK,MACAzK,KAAAsJ,KAAAnD,GAiHA,SAAA/F,GACA,UAAAA,GAAA,iBAAAA,EAAA,OAAAA,EAEA,IAAAwK,EAAAxK,EAAAyK,cAEA,QAAAC,KAAA1K,EACAA,EAAAmH,eAAAuD,KAAAF,EAAAE,GAAA1K,EAAA0K,IAGA,OAAAF,EA1HAG,CAAA9E,GAEAjG,KAAAsJ,KAAAnD,GAAA,MAcAjH,EAAA4F,cAAAvE,UAAAwG,OAAA,SAAAZ,GACA,WAAAnG,KAAA8G,OAAAX,GAAA,KACAnG,KAAAsJ,KAAAnD,IAUAjH,EAAA4F,cAAAvE,UAAAuG,OAAA,SAAAX,GACA,OAAAA,KAAAnG,KAAAsJ,MASApK,EAAA4F,cAAAvE,UAAAyG,UAAA,SAAAb,GACAnG,KAAA8G,OAAAX,YAEAnG,KAAAsJ,KAAAnD,UACAnG,KAAA0K,QAAAvE,GACAnG,KAAAyB,WAWAvC,EAAA4F,cAAAvE,UAAA8F,eAAA,SAAAF,EAAAR,EAAAlE,GACA0E,SACA,GAAAnG,KAAA8G,OAAAX,KAEAnG,KAAA0K,QAAAvE,KAAAnG,KAAA0K,QAAAvE,GAAA,IACAnG,KAAA0K,QAAAvE,GAAAR,GAAAlE,IAWAvC,EAAA4F,cAAAvE,UAAAyK,kBAAA,SAAA7E,EAAA
R,EAAAlE,GACA0E,SACA,GAAAnG,KAAA8G,OAAAX,IAEAnG,KAAAqG,eAAAF,EAAAR,EAAAlE,IAUAvC,EAAA4F,cAAAvE,UAAAqJ,eAAA,SAAAzD,EAAAR,GACA,OAAAQ,QAAA,EAEAA,KAAAnG,KAAAsJ,MACA3D,KAAA3F,KAAA0K,QAAAvE,GACAnG,KAAA0K,QAAAvE,GAAAR,GAFA,GAWAzG,EAAA4F,cAAAvE,UAAAmE,OAAA,WACA,OACA4E,KAAAtJ,KAAAsJ,KACAoB,QAAA1K,KAAA0K,QACAjJ,OAAAzB,KAAAyB,OACAsE,KAAA/F,KAAAyK,QAqCAvL,EAAAQ,SACAhC,EAAA,CACAuN,QAAA,MACAC,OAAA,OACAC,KAAA,OACAC,KAAA,OACAC,KAAA,MACAC,IAAA,MACAC,KAAA,KACAC,MAAA,MACAC,IAAA,IACAC,MAAA,MACAC,QAAA,MACAC,MAAA,MACAC,KAAA,MACAC,MAAA,KACAC,QAAA,MACAC,QAAA,MACAC,QAAA,MACAC,MAAA,KACAC,MAAA,MACAC,OAAA,MACAC,KAAA,OAGA1O,EAAA,CACA2O,MAAA,KACAC,MAAA,GACAC,MAAA,KACAC,MAAA,KACAC,KAAA,KACAC,IAAA,GACAC,KAAA,IAIAhP,EAAA,WACAC,EAAAgP,qBAQA/O,EAAA,IAAAgP,OALA,4DAMA/O,EAAA,IAAA+O,OAJA,8FAKA9O,EAAA,IAAA8O,OANA,gFAOA7O,EAAA,IAAA6O,OALA,kCAOA5O,EAAA,kBACAC,EAAA,iBACAC,EAAA,aACAC,EAAA,kBACAC,EAAA,KACAC,EAAA,cACAC,EAAA,IAAAsO,OAAA,sBACArO,EAAA,IAAAqO,OAAA,IAAAjP,EAAAD,EAAA,gBAEAc,EAAA,mBACAC,EAAA,2IAEAC,EAAA,iDAEAC,EAAA,sFACAC,EAAA,oBAEAC,EAAA,WACAC,EAAA,MACAC,EAAA,IAAA6N,OAAA,IAAAjP,EAAAD,EAAA,gBAEA,SAAAmP,GACA,IAAAC,EACAC,EACAC,EACAC,EACAC,EACAC,EACAC,EAEA,GAAAP,EAAAtL,OAAA,EAAuB,OAAAsL,EAiBvB,GAdA,MADAG,EAAAH,EAAAQ,OAAA,QAEAR,EAAAG,EAAAM,cAAAT,EAAAQ,OAAA,IAKAH,EAAAjP,GADAgP,EAAAjP,GAGAuP,KAAAV,GAAqBA,IAAAW,QAAAP,EAAA,QACrBC,EAAAK,KAAAV,KAA2BA,IAAAW,QAAAN,EAAA,SAI3BA,EAAA/O,GADA8O,EAAA/O,GAEAqP,KAAAV,GAAA,CACA,IAAAY,EAAAR,EAAAS,KAAAb,IACAI,EAAArP,GACA2P,KAAAE,EAAA,MACAR,EAAA7O,EACAyO,IAAAW,QAAAP,EAAA,UAEKC,EAAAK,KAAAV,KAELC,GADAW,EAAAP,EAAAQ,KAAAb,IACA,IACAK,EAAAnP,GACAwP,KAAAT,KAGAK,EAAA7O,EACA8O,EAAA7O,GAFA2O,EAAA7O,GAGAkP,KAJAV,EAAAC,GAI0BD,GAAA,IAC1BM,EAAAI,KAAAV,IAA+BI,EAAA7O,EAAcyO,IAAAW,QAAAP,EAAA,KAC7CG,EAAAG,KAAAV,KAA+BA,GAAA,OAiF/B,OA5EAI,EAAAzO,GACA+O,KAAAV,KAGAA,GADAC,GADAW,EAAAR,EAAAS,KAAAb,IACA,IACA,MAIAI,EAAAxO,GACA8O,KAAAV,KAEAC,GADAW,EAAAR,EAAAS,KAAAb,IACA,GACAE,EAAAU,EAAA,IACAR,EAAArP,GACA2P,KAAAT,KACAD,EAAAC,EAAAtP,EAAAuP,MAKAE,EAAAvO,GACA6O,KAAAV,KAEAC,GADAW,EAAAR,EAAAS,KAAAb,IACA,GACAE,EAAAU,EAAA,IACAR,EAAArP,GACA2P,KAAAT,KACAD,EAAAC,EAAArP,EAAAsP,KAMAG,EAAAtO,GADAqO,EAAAtO,GAEA4O,KAAAV,IAEAC,GADAW,EAAAR,EAAAS,KAAAb,IACA,IACAI,EAAApP,GACA0P,KAAAT,KACAD,EAAAC,IAEKI,EAAAK,KAAAV,KAELC,GADAW,EAAAP,EAAAQ,KAAAb,IACA,GAAAY,EAAA,IACAP,EAAArP,GACA0P,KAAAT,KACAD,EAAAC,KAKAG,EAAApO,GACA0O,KAAAV,KAEAC,GADAW,EAAAR,EAAAS,KAAAb,IACA,GAEAK,EAAApP,EACAqP,EAAApO,IAFAkO,EAAApP,GAGA0P,KAAAT,IAAAI,EAAAK,KAAAT,KAAAK,EAAAI,KAAAT,MACAD,EAAAC,IAKAI,EAAArP,GADAoP,EAAAnO,GAEAyO,KAAAV,IAAAK,EAAAK,KAAAV,KACAI,EAAA7O,EACAyO,IAAAW,QAAAP,EAAA,KAKA,KAAAD,IACAH,EAAAG,EAAA7K,cAAA0K,EAAAQ,OAAA,IAGAR,IAMA7N,EAAAgE,SAAAG,iBAAAnE,EAAAQ,QAAA,WAoBAR,EAAAO,eAAA,SAAAyC,GACA,GAAAA,IAAA,IAAAhD,EAAAO,eAAAoO,UAAA3L,GACA,OAAAA,GAWAhD,EAAA4O,eAAA,WACA5O,EAAAO,eAAAoO,UAAA,IAUA3O,EAAA6O,aAAA,SAAAC,GACA,MAAAA,IAAA,IAAAtN,MAAAqB,QAAAiM,IAEAA,EAAA/M,QAAA,SAAAgN,GACA/O,EAAAO,eAAAoO,UAAAI,IAAA,GACGjO,OASHd,EAAAgP,eAAA,WACAhP,EAAAO,eAAAoO,UAAA3O,EAAAiP,kBAGAjP,EAAAiP,iBAAA,CACAC,IAAA,EACAxF,GAAA,EACAyF,MAAA,EACAC,OAAA,EACAC,QAAA,EACA1K,OAAA,EACA2K,KAAA,EACAC,QAAA,EACAC,MAAA,EACAC,IAAA,EACAC,OAAA,EACAC,IAAA,EACAC,KAAA,EACAC,KAAA,EACAC,KAAA,EACAC,IAAA,EACAC,IAAA,EACAC,IAAA,EACAC,SAAA,EACAC,MAAA,EACAC,KAAA,EACAC,IAAA,EACAC,KAAA,EACAC,QAAA,EACAC,OAAA,EACAC,MAAA,EACAC,KAAA,EACAC,IAAA,EACAC,MAAA,EACAC,QAAA,EACAC,MAAA,EACAC,MAAA,EACAC,OAAA,EACAC,KAAA,EACAC,MAAA,EACA3L,KAAA,EACA4L,KAAA,EACAC,KAAA,EACAC,KAAA,EACAC,MAAA,EACAC,IAAA,EACAC,KAAA,EACAC,MAAA,EACAC,KAAA,EACAC,KAAA,EACAC,KAAA,EACAC,SAAA,EACAzM,GAAA,EACA0M,IAAA,EACAC,IAAA,EACAC,MAAA,EACAC,IAAA,EACAC,IAAA,EACAC,KAAA
,EACAC,MAAA,EACAC,OAAA,EACAC,KAAA,EACAC,MAAA,EACAC,QAAA,EACAC,KAAA,EACAC,IAAA,EACAC,OAAA,EACAC,MAAA,EACAC,MAAA,EACAC,IAAA,EACAC,SAAA,EACAC,IAAA,EACAC,KAAA,EACAC,KAAA,EACAC,IAAA,EACAjN,KAAA,EACAkN,OAAA,EACApN,IAAA,EACAqN,MAAA,EACAC,IAAA,EACAC,OAAA,EACAC,KAAA,EACAC,KAAA,EACAC,QAAA,EACAC,MAAA,EACAC,KAAA,EACAC,MAAA,EACAC,KAAA,EACAC,QAAA,EACAC,OAAA,EACAC,IAAA,EACAC,MAAA,EACAC,MAAA,EACAC,MAAA,EACAC,KAAA,EACAC,OAAA,EACAC,MAAA,EACAC,MAAA,EACAC,OAAA,EACAC,OAAA,EACAC,MAAA,EACA7T,MAAA,EACA8T,KAAA,EACAC,IAAA,EACAC,KAAA,EACAC,MAAA,EACAC,IAAA,EACAC,OAAA,EACAC,KAAA,EACAC,IAAA,EACAC,MAAA,EACAC,MAAA,EACAC,MAAA,EACAC,OAAA,EACAC,OAAA,EACAC,OAAA,EACAC,KAAA,EACAC,MAAA,EACAC,KAAA,EACAC,MAAA,EACAC,MAAA,EACAC,OAAA,EACAC,KAAA,EACAC,KAAA,EACAC,MAAA,GAGAlW,EAAAO,eAAAoO,UAAA3O,EAAAiP,iBAEAjP,EAAAgE,SAAAG,iBAAAnE,EAAAO,eAAA,kBAqBAP,EAAAM,QAAA,SAAA0C,GACA,GAAAA,QACA,UAAA0B,MAAA,iCAGA,OAAA1B,EACAwL,QAAA,WACAA,QAAA,YAGAxO,EAAAgE,SAAAG,iBAAAnE,EAAAM,QAAA,WAaAN,EAAAuG,cAAA,WACAzF,KAAAqV,KAAA,CAAe/L,KAAA,GAAS9B,GAAA,IASxBtI,EAAAuG,cAAAhC,KAAA,SAAA4B,GACA,IAAAjG,EAAA,IAAAY,KAGA,OAFAZ,EAAAiW,KAAAhQ,EAAAgQ,KAEAjW,GAqBAF,EAAAuG,cAAAlF,UAAAmG,SAAA,SAAAxE,EAAAoT,EAAAD,GACAA,KAAArV,KAAAqV,KAGA,IAHA,IACAjW,EAAA,EAEAA,GAAA8C,EAAAT,OAAA,IACA,IAAA4H,EAAAnH,EAAA9C,GAEAiK,KAAAgM,MAAAhM,GAAA,CAAqCC,KAAA,GAAQ9B,GAAA,IAC7CpI,GAAA,EACAiW,IAAAhM,GAGA,IAAAlD,EAAAmP,EAAA9P,IACA6P,EAAA/L,KAAAnD,GAMAkP,EAAA/L,KAAAnD,GAAA,CAAyBQ,GAAA2O,EAAA3O,KAJzB0O,EAAA/L,KAAAnD,GAAA,CAAyBQ,GAAA2O,EAAA3O,IACzB0O,EAAA7N,IAAA,IAeAtI,EAAAuG,cAAAlF,UAAAgV,SAAA,SAAArT,GACA,IAAAA,EAAA,SAIA,IAFA,IAAAsT,EAAAxV,KAAAqV,KAEA/Q,EAAA,EAAiBA,EAAApC,EAAAT,OAAkB6C,IAAA,CACnC,IAAAkR,EAAAtT,EAAAoC,IAAA,SACAkR,IAAAtT,EAAAoC,IAGA,UAaApF,EAAAuG,cAAAlF,UAAAkV,QAAA,SAAAvT,GACA,IAAAA,EAAA,YAIA,IAFA,IAAAsT,EAAAxV,KAAAqV,KAEA/Q,EAAA,EAAiBA,EAAApC,EAAAT,OAAkB6C,IAAA,CACnC,IAAAkR,EAAAtT,EAAAoC,IAAA,YACAkR,IAAAtT,EAAAoC,IAGA,OAAAkR,GAYAtW,EAAAuG,cAAAlF,UAAAgJ,QAAA,SAAArH,GACA,IAAAsT,EAAAxV,KAAAyV,QAAAvT,GACA,aAAAsT,EACA,GAGAA,EAAAlM,MAaApK,EAAAuG,cAAAlF,UAAAmJ,iBAAA,SAAAxH,EAAAiE,GACA,IAAAqP,EAAAxV,KAAAyV,QAAAvT,GAEA,aAAAsT,EACA,EAGArP,KAAAqP,EAAAlM,KAIAkM,EAAAlM,KAAAnD,GAAAQ,GAHA,GAeAzH,EAAAuG,cAAAlF,UAAAkH,WAAA,SAAAvF,GACA,IAAAsT,EAAAxV,KAAAyV,QAAAvT,GAEA,aAAAsT,EACA,EAGAA,EAAAhO,IAWAtI,EAAAuG,cAAAlF,UAAA0G,YAAA,SAAA/E,EAAAsD,GACA,GAAAtD,EAAA,CACA,IAAAsT,EAAAxV,KAAAyV,QAAAvT,GAEA,MAAAsT,GAEAhQ,KAAAgQ,EAAAlM,cACAkM,EAAAlM,KAAA9D,GACAgQ,EAAAhO,IAAA,KAYAtI,EAAAuG,cAAAlF,UAAA4I,YAAA,SAAAjH,EAAAwT,EAAAL,GACA,SAAAnT,GAAA,IAAAA,EAAA,SACAwT,KAAA,GAEA,SAAAL,GAEA,OADAA,EAAArV,KAAAyV,QAAAvT,IACA,OAAAwT,EAKA,QAAArM,KAFAgM,EAAA7N,GAAA,GAAAkO,EAAAtU,KAAAc,GAEAmT,EACA,SAAAhM,GACA,OAAAA,GACArJ,KAAAmJ,YAAAjH,EAAAmH,EAAAqM,EAAAL,EAAAhM,IAGA,OAAAqM,GASAxW,EAAAuG,cAAAlF,UAAAmE,OAAA,WACA,OACA2Q,KAAArV,KAAAqV,OAgFAnW,EAAAgJ,cAAA,SAAA/I,EAAAoG,GACA,IAQAuC,EARA3I,KAAA,GAEA,GAAAyC,MAAA2D,GAAA,MAAAA,EACA,UAAA3B,MAAA,6BAGA5D,KAAAb,OAAA,GAGA,IACA2I,EAAAE,KAAA2N,MAAAxW,GACAa,KAAA4V,gBAAA9N,EAAAvC,GACG,MAAAsQ,GACH3W,EAAAY,MAAAC,KAAA,mEACAC,KAAA8V,mBAAAvQ,KASArG,EAAAgJ,cAAA3H,UAAAuV,mBAAA,SAAAvQ,GACAvF,KAAAwE,QACAe,EAAAtE,QAAA,SAAAqE,GACAtF,KAAAb,OAAAmG,GAAA,CACAkD,MAAA,EACAO,KAAA,KACAC,QAAA,IAEGhJ,OASHd,EAAAgJ,cAAA3H,UAAAqV,gBAAA,SAAAzW,EAAAoG,GACA,IAAAwQ,EAAA,KACAC,GAAA,EAWA,GATAhW,KAAAwE,QACA,SAAArF,IACA4W,EAAA5W,EAAA,MAAA4W,GAGA,WAAA5W,IACA6W,EAAA7W,EAAA,QAAA6W,GAGA,WAAA7W,EACA,QAAAmG,KAAAnG,EAAA,OACA,GAAAoG,EAAAhE,QAAA+D,IAAA,GACA,IAAA2Q,EAAA9W,EAAA,OAAAmG,GACA4Q,EAAAF,EACApU,MAAAqU,EAAAjN,SACAkN,EAAAD,EAAAjN,QAGAhJ,KAAAb,OAAAmG,GAAA,CACAkD,MAAAyN,EAAAzN,OAAA,IAAAyN,EAAAzN,MAAAyN,EAAAzN,MAAA,EACAO,KAAAkN,EAAAlN,MA
AAgN,EACA/M,OAAAkN,QAGAhX,EAAAY,MAAAC,KAAA,4EAIAC,KAAAmW,wBAAAJ,EAAAC,EAAAzQ,IAWArG,EAAAgJ,cAAA3H,UAAA4V,wBAAA,SAAApN,EAAAC,EAAAzD,GACAA,EAAAtE,QAAA,SAAAqE,GACAtF,KAAAb,OAAAmG,GAAA,CACAkD,MAAA,EACAO,OACAC,WAEGhJ,OAMHd,EAAAgJ,cAAA3H,UAAAkE,IAAA,WACA,OAAAzE,KAAAb,QAMAD,EAAAgJ,cAAA3H,UAAAiE,MAAA,WACAxE,KAAAb,OAAA,IAqBAU,KAAAuW,UAAA,WACApW,KAAAyB,OAAA,EACAzB,KAAAqW,SAAA,IAUAxW,KAAAuW,UAAA3S,KAAA,SAAA4B,GACA,IAAAiR,EAAA,IAAAtW,KAKA,OAHAsW,EAAAD,SAAAhR,EACAiR,EAAA7U,OAAA4D,EAAA5D,OAEA6U,GAUAzW,KAAAuW,UAAA7V,UAAAhB,IAAA,WACA,IAAA+E,EAAAiS,EAEA,IAAAjS,EAAA,EAAaA,EAAA1D,UAAAa,OAAsB6C,IACnCiS,EAAA3V,UAAA0D,IACAtE,KAAAuB,QAAAgV,IACAvW,KAAAqW,SAAA7U,OAAAxB,KAAAwW,YAAAD,GAAA,EAAAA,GAGAvW,KAAAyB,OAAAzB,KAAAqW,SAAA5U,QASA5B,KAAAuW,UAAA7V,UAAAkW,QAAA,WACA,OAAAzW,KAAAqW,SAAA1V,SAgBAd,KAAAuW,UAAA7V,UAAA4B,IAAA,SAAAtB,EAAA6V,GACA,OAAA1W,KAAAqW,SAAAlU,IAAAtB,EAAA6V,IAcA7W,KAAAuW,UAAA7V,UAAAU,QAAA,SAAAJ,EAAA6V,GACA,OAAA1W,KAAAqW,SAAApV,QAAAJ,EAAA6V,IAWA7W,KAAAuW,UAAA7V,UAAAgB,QAAA,SAAAoV,GAOA,IANA,IAAAC,EAAA,EACAC,EAAA7W,KAAAqW,SAAA5U,OACAqV,EAAAD,EAAAD,EACAG,EAAAH,EAAApQ,KAAAwQ,MAAAF,EAAA,GACAG,EAAAjX,KAAAqW,SAAAU,GAEAD,EAAA,IACA,GAAAG,IAAAN,EAAA,OAAAI,EAEAE,EAAAN,IAAAC,EAAAG,GACAE,EAAAN,IAAAE,EAAAE,GAEAD,EAAAD,EAAAD,EACAG,EAAAH,EAAApQ,KAAAwQ,MAAAF,EAAA,GACAG,EAAAjX,KAAAqW,SAAAU,GAGA,OAAAE,IAAAN,EAAAI,GAEA,GAcAlX,KAAAuW,UAAA7V,UAAAiW,YAAA,SAAAG,GAOA,IANA,IAAAC,EAAA,EACAC,EAAA7W,KAAAqW,SAAA5U,OACAqV,EAAAD,EAAAD,EACAG,EAAAH,EAAApQ,KAAAwQ,MAAAF,EAAA,GACAG,EAAAjX,KAAAqW,SAAAU,GAEAD,EAAA,GACAG,EAAAN,IAAAC,EAAAG,GACAE,EAAAN,IAAAE,EAAAE,GAEAD,EAAAD,EAAAD,EACAG,EAAAH,EAAApQ,KAAAwQ,MAAAF,EAAA,GACAG,EAAAjX,KAAAqW,SAAAU,GAGA,OAAAE,EAAAN,EAAAI,EACAE,EAAAN,EAAAI,EAAA,UAWAlX,KAAAuW,UAAA7V,UAAA2W,UAAA,SAAAC,GAMA,IALA,IAAAC,EAAA,IAAAvX,KAAAuW,UACA9R,EAAA,EAAAC,EAAA,EACA8S,EAAArX,KAAAyB,OAAA6V,EAAAH,EAAA1V,OACAmH,EAAA5I,KAAAqW,SAAAxN,EAAAsO,EAAAd,WAGA/R,EAAA+S,EAAA,GAAA9S,EAAA+S,EAAA,IAEA1O,EAAAtE,KAAAuE,EAAAtE,GAMAqE,EAAAtE,GAAAuE,EAAAtE,GACAD,IAIAsE,EAAAtE,GAAAuE,EAAAtE,IACAA,KAXA6S,EAAA7X,IAAAqJ,EAAAtE,IACAA,IAAAC,KAeA,OAAA6S,GASAvX,KAAAuW,UAAA7V,UAAAwK,MAAA,WACA,IAAAA,EAAA,IAAAlL,KAAAuW,UAKA,OAHArL,EAAAsL,SAAArW,KAAAyW,UACA1L,EAAAtJ,OAAAsJ,EAAAsL,SAAA5U,OAEAsJ,GAWAlL,KAAAuW,UAAA7V,UAAAgX,MAAA,SAAAJ,GACA,IAAAK,EAAAC,EAAAC,EAEA1X,KAAAyB,QAAA0V,EAAA1V,QACA+V,EAAAxX,KAAAyX,EAAAN,IAEAK,EAAAL,EAAAM,EAAAzX,MAGA0X,EAAAF,EAAAzM,QAEA,QAAAzG,EAAA,EAAAqT,EAAAF,EAAAhB,UAAuDnS,EAAAqT,EAAAlW,OAA6B6C,IACpFoT,EAAAnY,IAAAoY,EAAArT,IAGA,OAAAoT,GASA7X,KAAAuW,UAAA7V,UAAAmE,OAAA,WACA,OAAA1E,KAAAyW,gBASoB7U,KAAApE,EAAA,mBAAdD,EAYH,WAMH,OAAA2B,IAlBoB3B,EAAAoC,KAAAiY,EAAAC,EAAAD,EAAAE,GAAAva,KAAAua,EAAAF,QAAApa,GAt5EpB","file":"9-2152530669b4465e711f.js","sourcesContent":["/**\n * elasticlunr - http://weixsong.github.io\n * Lightweight full-text search engine in Javascript for browser search and offline search. - 0.9.5\n *\n * Copyright (C) 2016 Oliver Nightingale\n * Copyright (C) 2016 Wei Song\n * MIT Licensed\n * @license\n */\n\n(function(){\n\n/*!\n * elasticlunr.js\n * Copyright (C) 2016 Oliver Nightingale\n * Copyright (C) 2016 Wei Song\n */\n\n/**\n * Convenience function for instantiating a new elasticlunr index and configuring it\n * with the default pipeline functions and the passed config function.\n *\n * When using this convenience function a new index will be created with the\n * following functions already in the pipeline:\n * \n * 1. elasticlunr.trimmer - trim non-word character\n * 2. elasticlunr.StopWordFilter - filters out any stop words before they enter the\n * index\n * 3. 
elasticlunr.stemmer - stems the tokens before entering the index.\n *\n *\n * Example:\n *\n * var idx = elasticlunr(function () {\n * this.addField('id');\n * this.addField('title');\n * this.addField('body');\n * \n * //this.setRef('id'); // default ref is 'id'\n *\n * this.pipeline.add(function () {\n * // some custom pipeline function\n * });\n * });\n * \n * idx.addDoc({\n * id: 1, \n * title: 'Oracle released database 12g',\n * body: 'Yestaday, Oracle has released their latest database, named 12g, more robust. this product will increase Oracle profit.'\n * });\n * \n * idx.addDoc({\n * id: 2, \n * title: 'Oracle released annual profit report',\n * body: 'Yestaday, Oracle has released their annual profit report of 2015, total profit is 12.5 Billion.'\n * });\n * \n * # simple search\n * idx.search('oracle database');\n * \n * # search with query-time boosting\n * idx.search('oracle database', {fields: {title: {boost: 2}, body: {boost: 1}}});\n *\n * @param {Function} config A function that will be called with the new instance\n * of the elasticlunr.Index as both its context and first parameter. It can be used to\n * customize the instance of new elasticlunr.Index.\n * @namespace\n * @module\n * @return {elasticlunr.Index}\n *\n */\nvar elasticlunr = function (config) {\n var idx = new elasticlunr.Index;\n\n idx.pipeline.add(\n elasticlunr.trimmer,\n elasticlunr.stopWordFilter,\n elasticlunr.stemmer\n );\n\n if (config) config.call(idx, idx);\n\n return idx;\n};\n\nelasticlunr.version = \"0.9.5\";\n\n// only used this to make elasticlunr.js compatible with lunr-languages\n// this is a trick to define a global alias of elasticlunr\nlunr = elasticlunr;\n\n/*!\n * elasticlunr.utils\n * Copyright (C) 2016 Oliver Nightingale\n * Copyright (C) 2016 Wei Song\n */\n\n/**\n * A namespace containing utils for the rest of the elasticlunr library\n */\nelasticlunr.utils = {};\n\n/**\n * Print a warning message to the console.\n *\n * @param {String} message The message to be printed.\n * @memberOf Utils\n */\nelasticlunr.utils.warn = (function (global) {\n return function (message) {\n if (global.console && console.warn) {\n console.warn(message);\n }\n };\n})(this);\n\n/**\n * Convert an object to string.\n *\n * In the case of `null` and `undefined` the function returns\n * an empty string, in all other cases the result of calling\n * `toString` on the passed object is returned.\n *\n * @param {object} obj The object to convert to a string.\n * @return {String} string representation of the passed object.\n * @memberOf Utils\n */\nelasticlunr.utils.toString = function (obj) {\n if (obj === void 0 || obj === null) {\n return \"\";\n }\n\n return obj.toString();\n};\n/*!\n * elasticlunr.EventEmitter\n * Copyright (C) 2016 Oliver Nightingale\n * Copyright (C) 2016 Wei Song\n */\n\n/**\n * elasticlunr.EventEmitter is an event emitter for elasticlunr.\n * It manages adding and removing event handlers and triggering events and their handlers.\n *\n * Each event could has multiple corresponding functions,\n * these functions will be called as the sequence that they are added into the event.\n * \n * @constructor\n */\nelasticlunr.EventEmitter = function () {\n this.events = {};\n};\n\n/**\n * Binds a handler function to a specific event(s).\n *\n * Can bind a single function to many different events in one call.\n *\n * @param {String} [eventName] The name(s) of events to bind this function to.\n * @param {Function} fn The function to call when an event is fired.\n * @memberOf EventEmitter\n 
*/\nelasticlunr.EventEmitter.prototype.addListener = function () {\n var args = Array.prototype.slice.call(arguments),\n fn = args.pop(),\n names = args;\n\n if (typeof fn !== \"function\") throw new TypeError (\"last argument must be a function\");\n\n names.forEach(function (name) {\n if (!this.hasHandler(name)) this.events[name] = [];\n this.events[name].push(fn);\n }, this);\n};\n\n/**\n * Removes a handler function from a specific event.\n *\n * @param {String} eventName The name of the event to remove this function from.\n * @param {Function} fn The function to remove from an event.\n * @memberOf EventEmitter\n */\nelasticlunr.EventEmitter.prototype.removeListener = function (name, fn) {\n if (!this.hasHandler(name)) return;\n\n var fnIndex = this.events[name].indexOf(fn);\n if (fnIndex === -1) return;\n\n this.events[name].splice(fnIndex, 1);\n\n if (this.events[name].length == 0) delete this.events[name];\n};\n\n/**\n * Call all functions that bounded to the given event.\n *\n * Additional data can be passed to the event handler as arguments to `emit`\n * after the event name.\n *\n * @param {String} eventName The name of the event to emit.\n * @memberOf EventEmitter\n */\nelasticlunr.EventEmitter.prototype.emit = function (name) {\n if (!this.hasHandler(name)) return;\n\n var args = Array.prototype.slice.call(arguments, 1);\n\n this.events[name].forEach(function (fn) {\n fn.apply(undefined, args);\n }, this);\n};\n\n/**\n * Checks whether a handler has ever been stored against an event.\n *\n * @param {String} eventName The name of the event to check.\n * @private\n * @memberOf EventEmitter\n */\nelasticlunr.EventEmitter.prototype.hasHandler = function (name) {\n return name in this.events;\n};\n/*!\n * elasticlunr.tokenizer\n * Copyright (C) 2016 Oliver Nightingale\n * Copyright (C) 2016 Wei Song\n */\n\n/**\n * A function for splitting a string into tokens.\n * Currently English is supported as default.\n * Uses `elasticlunr.tokenizer.seperator` to split strings, you could change\n * the value of this property to set how you want strings are split into tokens.\n * IMPORTANT: use elasticlunr.tokenizer.seperator carefully, if you are not familiar with\n * text process, then you'd better not change it.\n *\n * @module\n * @param {String} str The string that you want to tokenize.\n * @see elasticlunr.tokenizer.seperator\n * @return {Array}\n */\nelasticlunr.tokenizer = function (str) {\n if (!arguments.length || str === null || str === undefined) return [];\n if (Array.isArray(str)) {\n var arr = str.filter(function(token) {\n if (token === null || token === undefined) {\n return false;\n }\n\n return true;\n });\n\n arr = arr.map(function (t) {\n return elasticlunr.utils.toString(t).toLowerCase();\n });\n\n var out = [];\n arr.forEach(function(item) {\n var tokens = item.split(elasticlunr.tokenizer.seperator);\n out = out.concat(tokens);\n }, this);\n\n return out;\n }\n\n return str.toString().trim().toLowerCase().split(elasticlunr.tokenizer.seperator);\n};\n\n/**\n * Default string seperator.\n */\nelasticlunr.tokenizer.defaultSeperator = /[\\s\\-]+/;\n\n/**\n * The sperator used to split a string into tokens. Override this property to change the behaviour of\n * `elasticlunr.tokenizer` behaviour when tokenizing strings. 
By default this splits on whitespace and hyphens.\n *\n * @static\n * @see elasticlunr.tokenizer\n */\nelasticlunr.tokenizer.seperator = elasticlunr.tokenizer.defaultSeperator;\n\n/**\n * Set up customized string seperator\n *\n * @param {Object} sep The customized seperator that you want to use to tokenize a string.\n */\nelasticlunr.tokenizer.setSeperator = function(sep) {\n if (sep !== null && sep !== undefined && typeof(sep) === 'object') {\n elasticlunr.tokenizer.seperator = sep;\n }\n}\n\n/**\n * Reset string seperator\n *\n */\nelasticlunr.tokenizer.resetSeperator = function() {\n elasticlunr.tokenizer.seperator = elasticlunr.tokenizer.defaultSeperator;\n}\n\n/**\n * Get string seperator\n *\n */\nelasticlunr.tokenizer.getSeperator = function() {\n return elasticlunr.tokenizer.seperator;\n}\n/*!\n * elasticlunr.Pipeline\n * Copyright (C) 2016 Oliver Nightingale\n * Copyright (C) 2016 Wei Song\n */\n\n/**\n * elasticlunr.Pipelines maintain an ordered list of functions to be applied to \n * both documents tokens and query tokens.\n *\n * An instance of elasticlunr.Index will contain a pipeline\n * with a trimmer, a stop word filter, an English stemmer. Extra\n * functions can be added before or after either of these functions or these\n * default functions can be removed.\n *\n * When run the pipeline, it will call each function in turn.\n *\n * The output of the functions in the pipeline will be passed to the next function\n * in the pipeline. To exclude a token from entering the index the function\n * should return undefined, the rest of the pipeline will not be called with\n * this token.\n *\n * For serialisation of pipelines to work, all functions used in an instance of\n * a pipeline should be registered with elasticlunr.Pipeline. Registered functions can\n * then be loaded. 
If trying to load a serialised pipeline that uses functions\n * that are not registered an error will be thrown.\n *\n * If not planning on serialising the pipeline then registering pipeline functions\n * is not necessary.\n *\n * @constructor\n */\nelasticlunr.Pipeline = function () {\n this._queue = [];\n};\n\nelasticlunr.Pipeline.registeredFunctions = {};\n\n/**\n * Register a function in the pipeline.\n *\n * Functions that are used in the pipeline should be registered if the pipeline\n * needs to be serialised, or a serialised pipeline needs to be loaded.\n *\n * Registering a function does not add it to a pipeline, functions must still be\n * added to instances of the pipeline for them to be used when running a pipeline.\n *\n * @param {Function} fn The function to register.\n * @param {String} label The label to register this function with\n * @memberOf Pipeline\n */\nelasticlunr.Pipeline.registerFunction = function (fn, label) {\n if (label in elasticlunr.Pipeline.registeredFunctions) {\n elasticlunr.utils.warn('Overwriting existing registered function: ' + label);\n }\n\n fn.label = label;\n elasticlunr.Pipeline.registeredFunctions[label] = fn;\n};\n\n/**\n * Get a registered function in the pipeline.\n *\n * @param {String} label The label of registered function.\n * @return {Function}\n * @memberOf Pipeline\n */\nelasticlunr.Pipeline.getRegisteredFunction = function (label) {\n if ((label in elasticlunr.Pipeline.registeredFunctions) !== true) {\n return null;\n }\n\n return elasticlunr.Pipeline.registeredFunctions[label];\n};\n\n/**\n * Warns if the function is not registered as a Pipeline function.\n *\n * @param {Function} fn The function to check for.\n * @private\n * @memberOf Pipeline\n */\nelasticlunr.Pipeline.warnIfFunctionNotRegistered = function (fn) {\n var isRegistered = fn.label && (fn.label in this.registeredFunctions);\n\n if (!isRegistered) {\n elasticlunr.utils.warn('Function is not registered with pipeline. 
This may cause problems when serialising the index.\\n', fn);\n }\n};\n\n/**\n * Loads a previously serialised pipeline.\n *\n * All functions to be loaded must already be registered with elasticlunr.Pipeline.\n * If any function from the serialised data has not been registered then an\n * error will be thrown.\n *\n * @param {Object} serialised The serialised pipeline to load.\n * @return {elasticlunr.Pipeline}\n * @memberOf Pipeline\n */\nelasticlunr.Pipeline.load = function (serialised) {\n var pipeline = new elasticlunr.Pipeline;\n\n serialised.forEach(function (fnName) {\n var fn = elasticlunr.Pipeline.getRegisteredFunction(fnName);\n\n if (fn) {\n pipeline.add(fn);\n } else {\n throw new Error('Cannot load un-registered function: ' + fnName);\n }\n });\n\n return pipeline;\n};\n\n/**\n * Adds new functions to the end of the pipeline.\n *\n * Logs a warning if the function has not been registered.\n *\n * @param {Function} functions Any number of functions to add to the pipeline.\n * @memberOf Pipeline\n */\nelasticlunr.Pipeline.prototype.add = function () {\n var fns = Array.prototype.slice.call(arguments);\n\n fns.forEach(function (fn) {\n elasticlunr.Pipeline.warnIfFunctionNotRegistered(fn);\n this._queue.push(fn);\n }, this);\n};\n\n/**\n * Adds a single function after a function that already exists in the\n * pipeline.\n *\n * Logs a warning if the function has not been registered.\n * If existingFn is not found, throw an Exception.\n *\n * @param {Function} existingFn A function that already exists in the pipeline.\n * @param {Function} newFn The new function to add to the pipeline.\n * @memberOf Pipeline\n */\nelasticlunr.Pipeline.prototype.after = function (existingFn, newFn) {\n elasticlunr.Pipeline.warnIfFunctionNotRegistered(newFn);\n\n var pos = this._queue.indexOf(existingFn);\n if (pos === -1) {\n throw new Error('Cannot find existingFn');\n }\n\n this._queue.splice(pos + 1, 0, newFn);\n};\n\n/**\n * Adds a single function before a function that already exists in the\n * pipeline.\n *\n * Logs a warning if the function has not been registered.\n * If existingFn is not found, throw an Exception.\n *\n * @param {Function} existingFn A function that already exists in the pipeline.\n * @param {Function} newFn The new function to add to the pipeline.\n * @memberOf Pipeline\n */\nelasticlunr.Pipeline.prototype.before = function (existingFn, newFn) {\n elasticlunr.Pipeline.warnIfFunctionNotRegistered(newFn);\n\n var pos = this._queue.indexOf(existingFn);\n if (pos === -1) {\n throw new Error('Cannot find existingFn');\n }\n\n this._queue.splice(pos, 0, newFn);\n};\n\n/**\n * Removes a function from the pipeline.\n *\n * @param {Function} fn The function to remove from the pipeline.\n * @memberOf Pipeline\n */\nelasticlunr.Pipeline.prototype.remove = function (fn) {\n var pos = this._queue.indexOf(fn);\n if (pos === -1) {\n return;\n }\n\n this._queue.splice(pos, 1);\n};\n\n/**\n * Runs the current list of functions that registered in the pipeline against the\n * input tokens.\n *\n * @param {Array} tokens The tokens to run through the pipeline.\n * @return {Array}\n * @memberOf Pipeline\n */\nelasticlunr.Pipeline.prototype.run = function (tokens) {\n var out = [],\n tokenLength = tokens.length,\n pipelineLength = this._queue.length;\n\n for (var i = 0; i < tokenLength; i++) {\n var token = tokens[i];\n\n for (var j = 0; j < pipelineLength; j++) {\n token = this._queue[j](token, i, tokens);\n if (token === void 0 || token === null) break;\n };\n\n if (token !== void 0 && token 
!== null) out.push(token);\n };\n\n return out;\n};\n\n/**\n * Resets the pipeline by removing any existing processors.\n *\n * @memberOf Pipeline\n */\nelasticlunr.Pipeline.prototype.reset = function () {\n this._queue = [];\n};\n\n /**\n * Get the pipeline if user want to check the pipeline.\n *\n * @memberOf Pipeline\n */\n elasticlunr.Pipeline.prototype.get = function () {\n return this._queue;\n };\n\n/**\n * Returns a representation of the pipeline ready for serialisation.\n * Only serialize pipeline function's name. Not storing function, so when\n * loading the archived JSON index file, corresponding pipeline function is \n * added by registered function of elasticlunr.Pipeline.registeredFunctions\n *\n * Logs a warning if the function has not been registered.\n *\n * @return {Array}\n * @memberOf Pipeline\n */\nelasticlunr.Pipeline.prototype.toJSON = function () {\n return this._queue.map(function (fn) {\n elasticlunr.Pipeline.warnIfFunctionNotRegistered(fn);\n return fn.label;\n });\n};\n/*!\n * elasticlunr.Index\n * Copyright (C) 2016 Oliver Nightingale\n * Copyright (C) 2016 Wei Song\n */\n\n/**\n * elasticlunr.Index is object that manages a search index. It contains the indexes\n * and stores all the tokens and document lookups. It also provides the main\n * user facing API for the library.\n *\n * @constructor\n */\nelasticlunr.Index = function () {\n this._fields = [];\n this._ref = 'id';\n this.pipeline = new elasticlunr.Pipeline;\n this.documentStore = new elasticlunr.DocumentStore;\n this.index = {};\n this.eventEmitter = new elasticlunr.EventEmitter;\n this._idfCache = {};\n\n this.on('add', 'remove', 'update', (function () {\n this._idfCache = {};\n }).bind(this));\n};\n\n/**\n * Bind a handler to events being emitted by the index.\n *\n * The handler can be bound to many events at the same time.\n *\n * @param {String} [eventName] The name(s) of events to bind the function to.\n * @param {Function} fn The serialised set to load.\n * @memberOf Index\n */\nelasticlunr.Index.prototype.on = function () {\n var args = Array.prototype.slice.call(arguments);\n return this.eventEmitter.addListener.apply(this.eventEmitter, args);\n};\n\n/**\n * Removes a handler from an event being emitted by the index.\n *\n * @param {String} eventName The name of events to remove the function from.\n * @param {Function} fn The serialised set to load.\n * @memberOf Index\n */\nelasticlunr.Index.prototype.off = function (name, fn) {\n return this.eventEmitter.removeListener(name, fn);\n};\n\n/**\n * Loads a previously serialised index.\n *\n * Issues a warning if the index being imported was serialised\n * by a different version of elasticlunr.\n *\n * @param {Object} serialisedData The serialised set to load.\n * @return {elasticlunr.Index}\n * @memberOf Index\n */\nelasticlunr.Index.load = function (serialisedData) {\n if (serialisedData.version !== elasticlunr.version) {\n elasticlunr.utils.warn('version mismatch: current '\n + elasticlunr.version + ' importing ' + serialisedData.version);\n }\n\n var idx = new this;\n\n idx._fields = serialisedData.fields;\n idx._ref = serialisedData.ref;\n idx.documentStore = elasticlunr.DocumentStore.load(serialisedData.documentStore);\n idx.pipeline = elasticlunr.Pipeline.load(serialisedData.pipeline);\n idx.index = {};\n for (var field in serialisedData.index) {\n idx.index[field] = elasticlunr.InvertedIndex.load(serialisedData.index[field]);\n }\n\n return idx;\n};\n\n/**\n * Adds a field to the list of fields that will be searchable within documents in 
the index.\n *\n * Remember that inner index is build based on field, which means each field has one inverted index.\n *\n * Fields should be added before any documents are added to the index, fields\n * that are added after documents are added to the index will only apply to new\n * documents added to the index.\n *\n * @param {String} fieldName The name of the field within the document that should be indexed\n * @return {elasticlunr.Index}\n * @memberOf Index\n */\nelasticlunr.Index.prototype.addField = function (fieldName) {\n this._fields.push(fieldName);\n this.index[fieldName] = new elasticlunr.InvertedIndex;\n return this;\n};\n\n/**\n * Sets the property used to uniquely identify documents added to the index,\n * by default this property is 'id'.\n *\n * This should only be changed before adding documents to the index, changing\n * the ref property without resetting the index can lead to unexpected results.\n *\n * @param {String} refName The property to use to uniquely identify the\n * documents in the index.\n * @param {Boolean} emitEvent Whether to emit add events, defaults to true\n * @return {elasticlunr.Index}\n * @memberOf Index\n */\nelasticlunr.Index.prototype.setRef = function (refName) {\n this._ref = refName;\n return this;\n};\n\n/**\n *\n * Set if the JSON format original documents are save into elasticlunr.DocumentStore\n *\n * Defaultly save all the original JSON documents.\n *\n * @param {Boolean} save Whether to save the original JSON documents.\n * @return {elasticlunr.Index}\n * @memberOf Index\n */\nelasticlunr.Index.prototype.saveDocument = function (save) {\n this.documentStore = new elasticlunr.DocumentStore(save);\n return this;\n};\n\n/**\n * Add a JSON format document to the index.\n *\n * This is the way new documents enter the index, this function will run the\n * fields from the document through the index's pipeline and then add it to\n * the index, it will then show up in search results.\n *\n * An 'add' event is emitted with the document that has been added and the index\n * the document has been added to. This event can be silenced by passing false\n * as the second argument to add.\n *\n * @param {Object} doc The JSON format document to add to the index.\n * @param {Boolean} emitEvent Whether or not to emit events, default true.\n * @memberOf Index\n */\nelasticlunr.Index.prototype.addDoc = function (doc, emitEvent) {\n if (!doc) return;\n var emitEvent = emitEvent === undefined ? true : emitEvent;\n\n var docRef = doc[this._ref];\n\n this.documentStore.addDoc(docRef, doc);\n this._fields.forEach(function (field) {\n var fieldTokens = this.pipeline.run(elasticlunr.tokenizer(doc[field]));\n this.documentStore.addFieldLength(docRef, field, fieldTokens.length);\n\n var tokenCount = {};\n fieldTokens.forEach(function (token) {\n if (token in tokenCount) tokenCount[token] += 1;\n else tokenCount[token] = 1;\n }, this);\n\n for (var token in tokenCount) {\n var termFrequency = tokenCount[token];\n termFrequency = Math.sqrt(termFrequency);\n this.index[field].addToken(token, { ref: docRef, tf: termFrequency });\n }\n }, this);\n\n if (emitEvent) this.eventEmitter.emit('add', doc, this);\n};\n\n/**\n * Removes a document from the index by doc ref.\n *\n * To make sure documents no longer show up in search results they can be\n * removed from the index using this method.\n *\n * A 'remove' event is emitted with the document that has been removed and the index\n * the document has been removed from. 
This event can be silenced by passing false\n * as the second argument to remove.\n *\n * If user setting DocumentStore not storing the documents, then remove doc by docRef is not allowed.\n *\n * @param {String|Integer} docRef The document ref to remove from the index.\n * @param {Boolean} emitEvent Whether to emit remove events, defaults to true\n * @memberOf Index\n */\nelasticlunr.Index.prototype.removeDocByRef = function (docRef, emitEvent) {\n if (!docRef) return;\n if (this.documentStore.isDocStored() === false) {\n return;\n }\n\n if (!this.documentStore.hasDoc(docRef)) return;\n var doc = this.documentStore.getDoc(docRef);\n this.removeDoc(doc, false);\n};\n\n/**\n * Removes a document from the index.\n * This remove operation could work even the original doc is not store in the DocumentStore.\n *\n * To make sure documents no longer show up in search results they can be\n * removed from the index using this method.\n *\n * A 'remove' event is emitted with the document that has been removed and the index\n * the document has been removed from. This event can be silenced by passing false\n * as the second argument to remove.\n *\n *\n * @param {Object} doc The document ref to remove from the index.\n * @param {Boolean} emitEvent Whether to emit remove events, defaults to true\n * @memberOf Index\n */\nelasticlunr.Index.prototype.removeDoc = function (doc, emitEvent) {\n if (!doc) return;\n\n var emitEvent = emitEvent === undefined ? true : emitEvent;\n\n var docRef = doc[this._ref];\n if (!this.documentStore.hasDoc(docRef)) return;\n\n this.documentStore.removeDoc(docRef);\n\n this._fields.forEach(function (field) {\n var fieldTokens = this.pipeline.run(elasticlunr.tokenizer(doc[field]));\n fieldTokens.forEach(function (token) {\n this.index[field].removeToken(token, docRef);\n }, this);\n }, this);\n\n if (emitEvent) this.eventEmitter.emit('remove', doc, this);\n};\n\n/**\n * Updates a document in the index.\n *\n * When a document contained within the index gets updated, fields changed,\n * added or removed, to make sure it correctly matched against search queries,\n * it should be updated in the index.\n *\n * This method is just a wrapper around `remove` and `add`\n *\n * An 'update' event is emitted with the document that has been updated and the index.\n * This event can be silenced by passing false as the second argument to update. Only\n * an update event will be fired, the 'add' and 'remove' events of the underlying calls\n * are silenced.\n *\n * @param {Object} doc The document to update in the index.\n * @param {Boolean} emitEvent Whether to emit update events, defaults to true\n * @see Index.prototype.remove\n * @see Index.prototype.add\n * @memberOf Index\n */\nelasticlunr.Index.prototype.updateDoc = function (doc, emitEvent) {\n var emitEvent = emitEvent === undefined ? 
true : emitEvent;\n\n this.removeDocByRef(doc[this._ref], false);\n this.addDoc(doc, false);\n\n if (emitEvent) this.eventEmitter.emit('update', doc, this);\n};\n\n/**\n * Calculates the inverse document frequency for a token within the index of a field.\n *\n * @param {String} token The token to calculate the idf of.\n * @param {String} field The field to compute idf.\n * @see Index.prototype.idf\n * @private\n * @memberOf Index\n */\nelasticlunr.Index.prototype.idf = function (term, field) {\n var cacheKey = \"@\" + field + '/' + term;\n if (Object.prototype.hasOwnProperty.call(this._idfCache, cacheKey)) return this._idfCache[cacheKey];\n\n var df = this.index[field].getDocFreq(term);\n var idf = 1 + Math.log(this.documentStore.length / (df + 1));\n this._idfCache[cacheKey] = idf;\n\n return idf;\n};\n\n/**\n * get fields of current index instance\n *\n * @return {Array}\n */\nelasticlunr.Index.prototype.getFields = function () {\n return this._fields.slice();\n};\n\n/**\n * Searches the index using the passed query.\n * Queries should be a string, multiple words are allowed.\n *\n * If config is null, will search all fields defaultly, and lead to OR based query.\n * If config is specified, will search specified with query time boosting.\n *\n * All query tokens are passed through the same pipeline that document tokens\n * are passed through, so any language processing involved will be run on every\n * query term.\n *\n * Each query term is expanded, so that the term 'he' might be expanded to\n * 'hello' and 'help' if those terms were already included in the index.\n *\n * Matching documents are returned as an array of objects, each object contains\n * the matching document ref, as set for this index, and the similarity score\n * for this document against the query.\n *\n * @param {String} query The query to search the index with.\n * @param {JSON} userConfig The user query config, JSON format.\n * @return {Object}\n * @see Index.prototype.idf\n * @see Index.prototype.documentVector\n * @memberOf Index\n */\nelasticlunr.Index.prototype.search = function (query, userConfig) {\n if (!query) return [];\n\n var configStr = null;\n if (userConfig != null) {\n configStr = JSON.stringify(userConfig);\n }\n\n var config = new elasticlunr.Configuration(configStr, this.getFields()).get();\n\n var queryTokens = this.pipeline.run(elasticlunr.tokenizer(query));\n\n var queryResults = {};\n\n for (var field in config) {\n var fieldSearchResults = this.fieldSearch(queryTokens, field, config);\n var fieldBoost = config[field].boost;\n\n for (var docRef in fieldSearchResults) {\n fieldSearchResults[docRef] = fieldSearchResults[docRef] * fieldBoost;\n }\n\n for (var docRef in fieldSearchResults) {\n if (docRef in queryResults) {\n queryResults[docRef] += fieldSearchResults[docRef];\n } else {\n queryResults[docRef] = fieldSearchResults[docRef];\n }\n }\n }\n\n var results = [];\n for (var docRef in queryResults) {\n results.push({ref: docRef, score: queryResults[docRef]});\n }\n\n results.sort(function (a, b) { return b.score - a.score; });\n return results;\n};\n\n/**\n * search queryTokens in specified field.\n *\n * @param {Array} queryTokens The query tokens to query in this field.\n * @param {String} field Field to query in.\n * @param {elasticlunr.Configuration} config The user query config, JSON format.\n * @return {Object}\n */\nelasticlunr.Index.prototype.fieldSearch = function (queryTokens, fieldName, config) {\n var booleanType = config[fieldName].bool;\n var expand = 
config[fieldName].expand;\n var boost = config[fieldName].boost;\n var scores = null;\n var docTokens = {};\n\n // Do nothing if the boost is 0\n if (boost === 0) {\n return;\n }\n\n queryTokens.forEach(function (token) {\n var tokens = [token];\n if (expand == true) {\n tokens = this.index[fieldName].expandToken(token);\n }\n // Consider every query token in turn. If expanded, each query token\n // corresponds to a set of tokens, which is all tokens in the \n // index matching the pattern queryToken* .\n // For the set of tokens corresponding to a query token, find and score\n // all matching documents. Store those scores in queryTokenScores, \n // keyed by docRef.\n // Then, depending on the value of booleanType, combine the scores\n // for this query token with previous scores. If booleanType is OR,\n // then merge the scores by summing into the accumulated total, adding\n // new document scores are required (effectively a union operator). \n // If booleanType is AND, accumulate scores only if the document \n // has previously been scored by another query token (an intersection\n // operation0. \n // Furthermore, since when booleanType is AND, additional \n // query tokens can't add new documents to the result set, use the\n // current document set to limit the processing of each new query \n // token for efficiency (i.e., incremental intersection).\n \n var queryTokenScores = {};\n tokens.forEach(function (key) {\n var docs = this.index[fieldName].getDocs(key);\n var idf = this.idf(key, fieldName);\n \n if (scores && booleanType == 'AND') {\n // special case, we can rule out documents that have been\n // already been filtered out because they weren't scored\n // by previous query token passes.\n var filteredDocs = {};\n for (var docRef in scores) {\n if (docRef in docs) {\n filteredDocs[docRef] = docs[docRef];\n }\n }\n docs = filteredDocs;\n }\n // only record appeared token for retrieved documents for the\n // original token, not for expaned token.\n // beause for doing coordNorm for a retrieved document, coordNorm only care how many\n // query token appear in that document.\n // so expanded token should not be added into docTokens, if added, this will pollute the\n // coordNorm\n if (key == token) {\n this.fieldSearchStats(docTokens, key, docs);\n }\n\n for (var docRef in docs) {\n var tf = this.index[fieldName].getTermFrequency(key, docRef);\n var fieldLength = this.documentStore.getFieldLength(docRef, fieldName);\n var fieldLengthNorm = 1;\n if (fieldLength != 0) {\n fieldLengthNorm = 1 / Math.sqrt(fieldLength);\n }\n\n var penality = 1;\n if (key != token) {\n // currently I'm not sure if this penality is enough,\n // need to do verification\n penality = (1 - (key.length - token.length) / key.length) * 0.15;\n }\n\n var score = tf * idf * fieldLengthNorm * penality;\n\n if (docRef in queryTokenScores) {\n queryTokenScores[docRef] += score;\n } else {\n queryTokenScores[docRef] = score;\n }\n }\n }, this);\n \n scores = this.mergeScores(scores, queryTokenScores, booleanType);\n }, this);\n\n scores = this.coordNorm(scores, docTokens, queryTokens.length);\n return scores;\n};\n\n/**\n * Merge the scores from one set of tokens into an accumulated score table.\n * Exact operation depends on the op parameter. If op is 'AND', then only the\n * intersection of the two score lists is retained. Otherwise, the union of\n * the two score lists is returned. For internal use only.\n *\n * @param {Object} bool accumulated scores. 
Should be null on first call.\n * @param {String} scores new scores to merge into accumScores.\n * @param {Object} op merge operation (should be 'AND' or 'OR').\n *\n */\n\nelasticlunr.Index.prototype.mergeScores = function (accumScores, scores, op) {\n if (!accumScores) {\n return scores; \n }\n if (op == 'AND') {\n var intersection = {};\n for (var docRef in scores) {\n if (docRef in accumScores) {\n intersection[docRef] = accumScores[docRef] + scores[docRef];\n }\n }\n return intersection;\n } else {\n for (var docRef in scores) {\n if (docRef in accumScores) {\n accumScores[docRef] += scores[docRef];\n } else {\n accumScores[docRef] = scores[docRef];\n }\n }\n return accumScores;\n }\n};\n\n\n/**\n * Record the occuring query token of retrieved doc specified by doc field.\n * Only for inner user.\n *\n * @param {Object} docTokens a data structure stores which token appears in the retrieved doc.\n * @param {String} token query token\n * @param {Object} docs the retrieved documents of the query token\n *\n */\nelasticlunr.Index.prototype.fieldSearchStats = function (docTokens, token, docs) {\n for (var doc in docs) {\n if (doc in docTokens) {\n docTokens[doc].push(token);\n } else {\n docTokens[doc] = [token];\n }\n }\n};\n\n/**\n * coord norm the score of a doc.\n * if a doc contain more query tokens, then the score will larger than the doc\n * contains less query tokens.\n *\n * only for inner use.\n *\n * @param {Object} results first results\n * @param {Object} docs field search results of a token\n * @param {Integer} n query token number\n * @return {Object}\n */\nelasticlunr.Index.prototype.coordNorm = function (scores, docTokens, n) {\n for (var doc in scores) {\n if (!(doc in docTokens)) continue;\n var tokens = docTokens[doc].length;\n scores[doc] = scores[doc] * tokens / n;\n }\n\n return scores;\n};\n\n/**\n * Returns a representation of the index ready for serialisation.\n *\n * @return {Object}\n * @memberOf Index\n */\nelasticlunr.Index.prototype.toJSON = function () {\n var indexJson = {};\n this._fields.forEach(function (field) {\n indexJson[field] = this.index[field].toJSON();\n }, this);\n\n return {\n version: elasticlunr.version,\n fields: this._fields,\n ref: this._ref,\n documentStore: this.documentStore.toJSON(),\n index: indexJson,\n pipeline: this.pipeline.toJSON()\n };\n};\n\n/**\n * Applies a plugin to the current index.\n *\n * A plugin is a function that is called with the index as its context.\n * Plugins can be used to customise or extend the behaviour the index\n * in some way. A plugin is just a function, that encapsulated the custom\n * behaviour that should be applied to the index.\n *\n * The plugin function will be called with the index as its argument, additional\n * arguments can also be passed when calling use. 
The function will be called\n * with the index as its context.\n *\n * Example:\n *\n * var myPlugin = function (idx, arg1, arg2) {\n * // `this` is the index to be extended\n * // apply any extensions etc here.\n * }\n *\n * var idx = elasticlunr(function () {\n * this.use(myPlugin, 'arg1', 'arg2')\n * })\n *\n * @param {Function} plugin The plugin to apply.\n * @memberOf Index\n */\nelasticlunr.Index.prototype.use = function (plugin) {\n var args = Array.prototype.slice.call(arguments, 1);\n args.unshift(this);\n plugin.apply(this, args);\n};\n/*!\n * elasticlunr.DocumentStore\n * Copyright (C) 2016 Wei Song\n */\n\n/**\n * elasticlunr.DocumentStore is a simple key-value document store used for storing sets of tokens for\n * documents stored in index.\n *\n * elasticlunr.DocumentStore store original JSON format documents that you could build search snippet by this original JSON document.\n *\n * user could choose whether original JSON format document should be store, if no configuration then document will be stored defaultly.\n * If user care more about the index size, user could select not store JSON documents, then this will has some defects, such as user\n * could not use JSON document to generate snippets of search results.\n *\n * @param {Boolean} save If the original JSON document should be stored.\n * @constructor\n * @module\n */\nelasticlunr.DocumentStore = function (save) {\n if (save === null || save === undefined) {\n this._save = true;\n } else {\n this._save = save;\n }\n\n this.docs = {};\n this.docInfo = {};\n this.length = 0;\n};\n\n/**\n * Loads a previously serialised document store\n *\n * @param {Object} serialisedData The serialised document store to load.\n * @return {elasticlunr.DocumentStore}\n */\nelasticlunr.DocumentStore.load = function (serialisedData) {\n var store = new this;\n\n store.length = serialisedData.length;\n store.docs = serialisedData.docs;\n store.docInfo = serialisedData.docInfo;\n store._save = serialisedData.save;\n\n return store;\n};\n\n/**\n * check if current instance store the original doc\n *\n * @return {Boolean}\n */\nelasticlunr.DocumentStore.prototype.isDocStored = function () {\n return this._save;\n};\n\n/**\n * Stores the given doc in the document store against the given id.\n * If docRef already exist, then update doc.\n *\n * Document is store by original JSON format, then you could use original document to generate search snippets.\n *\n * @param {Integer|String} docRef The key used to store the JSON format doc.\n * @param {Object} doc The JSON format doc.\n */\nelasticlunr.DocumentStore.prototype.addDoc = function (docRef, doc) {\n if (!this.hasDoc(docRef)) this.length++;\n\n if (this._save === true) {\n this.docs[docRef] = clone(doc);\n } else {\n this.docs[docRef] = null;\n }\n};\n\n/**\n * Retrieves the JSON doc from the document store for a given key.\n *\n * If docRef not found, return null.\n * If user set not storing the documents, return null.\n *\n * @param {Integer|String} docRef The key to lookup and retrieve from the document store.\n * @return {Object}\n * @memberOf DocumentStore\n */\nelasticlunr.DocumentStore.prototype.getDoc = function (docRef) {\n if (this.hasDoc(docRef) === false) return null;\n return this.docs[docRef];\n};\n\n/**\n * Checks whether the document store contains a key (docRef).\n *\n * @param {Integer|String} docRef The id to look up in the document store.\n * @return {Boolean}\n * @memberOf DocumentStore\n */\nelasticlunr.DocumentStore.prototype.hasDoc = function (docRef) {\n return docRef 
in this.docs;\n};\n\n/**\n * Removes the value for a key in the document store.\n *\n * @param {Integer|String} docRef The id to remove from the document store.\n * @memberOf DocumentStore\n */\nelasticlunr.DocumentStore.prototype.removeDoc = function (docRef) {\n if (!this.hasDoc(docRef)) return;\n\n delete this.docs[docRef];\n delete this.docInfo[docRef];\n this.length--;\n};\n\n/**\n * Add field length of a document's field tokens from pipeline results.\n * The field length of a document is used to do field length normalization even without the original JSON document stored.\n *\n * @param {Integer|String} docRef document's id or reference\n * @param {String} fieldName field name\n * @param {Integer} length field length\n */\nelasticlunr.DocumentStore.prototype.addFieldLength = function (docRef, fieldName, length) {\n if (docRef === null || docRef === undefined) return;\n if (this.hasDoc(docRef) == false) return;\n\n if (!this.docInfo[docRef]) this.docInfo[docRef] = {};\n this.docInfo[docRef][fieldName] = length;\n};\n\n/**\n * Update field length of a document's field tokens from pipeline results.\n * The field length of a document is used to do field length normalization even without the original JSON document stored.\n *\n * @param {Integer|String} docRef document's id or reference\n * @param {String} fieldName field name\n * @param {Integer} length field length\n */\nelasticlunr.DocumentStore.prototype.updateFieldLength = function (docRef, fieldName, length) {\n if (docRef === null || docRef === undefined) return;\n if (this.hasDoc(docRef) == false) return;\n\n this.addFieldLength(docRef, fieldName, length);\n};\n\n/**\n * get field length of a document by docRef\n *\n * @param {Integer|String} docRef document id or reference\n * @param {String} fieldName field name\n * @return {Integer} field length\n */\nelasticlunr.DocumentStore.prototype.getFieldLength = function (docRef, fieldName) {\n if (docRef === null || docRef === undefined) return 0;\n\n if (!(docRef in this.docs)) return 0;\n if (!(fieldName in this.docInfo[docRef])) return 0;\n return this.docInfo[docRef][fieldName];\n};\n\n/**\n * Returns a JSON representation of the document store used for serialisation.\n *\n * @return {Object} JSON format\n * @memberOf DocumentStore\n */\nelasticlunr.DocumentStore.prototype.toJSON = function () {\n return {\n docs: this.docs,\n docInfo: this.docInfo,\n length: this.length,\n save: this._save\n };\n};\n\n/**\n * Cloning object\n *\n * @param {Object} object in JSON format\n * @return {Object} copied object\n */\nfunction clone(obj) {\n if (null === obj || \"object\" !== typeof obj) return obj;\n\n var copy = obj.constructor();\n\n for (var attr in obj) {\n if (obj.hasOwnProperty(attr)) copy[attr] = obj[attr];\n }\n\n return copy;\n}\n/*!\n * elasticlunr.stemmer\n * Copyright (C) 2016 Oliver Nightingale\n * Copyright (C) 2016 Wei Song\n * Includes code from - http://tartarus.org/~martin/PorterStemmer/js.txt\n */\n\n/**\n * elasticlunr.stemmer is an english language stemmer, this is a JavaScript\n * implementation of the PorterStemmer taken from http://tartarus.org/~martin\n *\n * @module\n * @param {String} str The string to stem\n * @return {String}\n * @see elasticlunr.Pipeline\n */\nelasticlunr.stemmer = (function(){\n var step2list = {\n \"ational\" : \"ate\",\n \"tional\" : \"tion\",\n \"enci\" : \"ence\",\n \"anci\" : \"ance\",\n \"izer\" : \"ize\",\n \"bli\" : \"ble\",\n \"alli\" : \"al\",\n \"entli\" : \"ent\",\n \"eli\" : \"e\",\n \"ousli\" : \"ous\",\n \"ization\" : 
\"ize\",\n \"ation\" : \"ate\",\n \"ator\" : \"ate\",\n \"alism\" : \"al\",\n \"iveness\" : \"ive\",\n \"fulness\" : \"ful\",\n \"ousness\" : \"ous\",\n \"aliti\" : \"al\",\n \"iviti\" : \"ive\",\n \"biliti\" : \"ble\",\n \"logi\" : \"log\"\n },\n\n step3list = {\n \"icate\" : \"ic\",\n \"ative\" : \"\",\n \"alize\" : \"al\",\n \"iciti\" : \"ic\",\n \"ical\" : \"ic\",\n \"ful\" : \"\",\n \"ness\" : \"\"\n },\n\n c = \"[^aeiou]\", // consonant\n v = \"[aeiouy]\", // vowel\n C = c + \"[^aeiouy]*\", // consonant sequence\n V = v + \"[aeiou]*\", // vowel sequence\n\n mgr0 = \"^(\" + C + \")?\" + V + C, // [C]VC... is m>0\n meq1 = \"^(\" + C + \")?\" + V + C + \"(\" + V + \")?$\", // [C]VC[V] is m=1\n mgr1 = \"^(\" + C + \")?\" + V + C + V + C, // [C]VCVC... is m>1\n s_v = \"^(\" + C + \")?\" + v; // vowel in stem\n\n var re_mgr0 = new RegExp(mgr0);\n var re_mgr1 = new RegExp(mgr1);\n var re_meq1 = new RegExp(meq1);\n var re_s_v = new RegExp(s_v);\n\n var re_1a = /^(.+?)(ss|i)es$/;\n var re2_1a = /^(.+?)([^s])s$/;\n var re_1b = /^(.+?)eed$/;\n var re2_1b = /^(.+?)(ed|ing)$/;\n var re_1b_2 = /.$/;\n var re2_1b_2 = /(at|bl|iz)$/;\n var re3_1b_2 = new RegExp(\"([^aeiouylsz])\\\\1$\");\n var re4_1b_2 = new RegExp(\"^\" + C + v + \"[^aeiouwxy]$\");\n\n var re_1c = /^(.+?[^aeiou])y$/;\n var re_2 = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;\n\n var re_3 = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;\n\n var re_4 = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/;\n var re2_4 = /^(.+?)(s|t)(ion)$/;\n\n var re_5 = /^(.+?)e$/;\n var re_5_1 = /ll$/;\n var re3_5 = new RegExp(\"^\" + C + v + \"[^aeiouwxy]$\");\n\n var porterStemmer = function porterStemmer(w) {\n var stem,\n suffix,\n firstch,\n re,\n re2,\n re3,\n re4;\n\n if (w.length < 3) { return w; }\n\n firstch = w.substr(0,1);\n if (firstch == \"y\") {\n w = firstch.toUpperCase() + w.substr(1);\n }\n\n // Step 1a\n re = re_1a\n re2 = re2_1a;\n\n if (re.test(w)) { w = w.replace(re,\"$1$2\"); }\n else if (re2.test(w)) { w = w.replace(re2,\"$1$2\"); }\n\n // Step 1b\n re = re_1b;\n re2 = re2_1b;\n if (re.test(w)) {\n var fp = re.exec(w);\n re = re_mgr0;\n if (re.test(fp[1])) {\n re = re_1b_2;\n w = w.replace(re,\"\");\n }\n } else if (re2.test(w)) {\n var fp = re2.exec(w);\n stem = fp[1];\n re2 = re_s_v;\n if (re2.test(stem)) {\n w = stem;\n re2 = re2_1b_2;\n re3 = re3_1b_2;\n re4 = re4_1b_2;\n if (re2.test(w)) { w = w + \"e\"; }\n else if (re3.test(w)) { re = re_1b_2; w = w.replace(re,\"\"); }\n else if (re4.test(w)) { w = w + \"e\"; }\n }\n }\n\n // Step 1c - replace suffix y or Y by i if preceded by a non-vowel which is not the first letter of the word (so cry -> cri, by -> by, say -> say)\n re = re_1c;\n if (re.test(w)) {\n var fp = re.exec(w);\n stem = fp[1];\n w = stem + \"i\";\n }\n\n // Step 2\n re = re_2;\n if (re.test(w)) {\n var fp = re.exec(w);\n stem = fp[1];\n suffix = fp[2];\n re = re_mgr0;\n if (re.test(stem)) {\n w = stem + step2list[suffix];\n }\n }\n\n // Step 3\n re = re_3;\n if (re.test(w)) {\n var fp = re.exec(w);\n stem = fp[1];\n suffix = fp[2];\n re = re_mgr0;\n if (re.test(stem)) {\n w = stem + step3list[suffix];\n }\n }\n\n // Step 4\n re = re_4;\n re2 = re2_4;\n if (re.test(w)) {\n var fp = re.exec(w);\n stem = fp[1];\n re = re_mgr1;\n if (re.test(stem)) {\n w = stem;\n }\n } else if (re2.test(w)) {\n var fp = re2.exec(w);\n stem = fp[1] + fp[2];\n re2 = re_mgr1;\n if (re2.test(stem)) {\n 
w = stem;\n }\n }\n\n // Step 5\n re = re_5;\n if (re.test(w)) {\n var fp = re.exec(w);\n stem = fp[1];\n re = re_mgr1;\n re2 = re_meq1;\n re3 = re3_5;\n if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) {\n w = stem;\n }\n }\n\n re = re_5_1;\n re2 = re_mgr1;\n if (re.test(w) && re2.test(w)) {\n re = re_1b_2;\n w = w.replace(re,\"\");\n }\n\n // and turn initial Y back to y\n\n if (firstch == \"y\") {\n w = firstch.toLowerCase() + w.substr(1);\n }\n\n return w;\n };\n\n return porterStemmer;\n})();\n\nelasticlunr.Pipeline.registerFunction(elasticlunr.stemmer, 'stemmer');\n/*!\n * elasticlunr.stopWordFilter\n * Copyright (C) 2016 Oliver Nightingale\n * Copyright (C) 2016 Wei Song\n */\n\n/**\n * elasticlunr.stopWordFilter is an English language stop words filter, any words\n * contained in the stop word list will not be passed through the filter.\n *\n * This is intended to be used in the Pipeline. If the token does not pass the\n * filter then undefined will be returned.\n * Currently this StopwordFilter using dictionary to do O(1) time complexity stop word filtering.\n *\n * @module\n * @param {String} token The token to pass through the filter\n * @return {String}\n * @see elasticlunr.Pipeline\n */\nelasticlunr.stopWordFilter = function (token) {\n if (token && elasticlunr.stopWordFilter.stopWords[token] !== true) {\n return token;\n }\n};\n\n/**\n * Remove predefined stop words\n * if user want to use customized stop words, user could use this function to delete\n * all predefined stopwords.\n *\n * @return {null}\n */\nelasticlunr.clearStopWords = function () {\n elasticlunr.stopWordFilter.stopWords = {};\n};\n\n/**\n * Add customized stop words\n * user could use this function to add customized stop words\n * \n * @params {Array} words customized stop words\n * @return {null}\n */\nelasticlunr.addStopWords = function (words) {\n if (words == null || Array.isArray(words) === false) return;\n\n words.forEach(function (word) {\n elasticlunr.stopWordFilter.stopWords[word] = true;\n }, this);\n};\n\n/**\n * Reset to default stop words\n * user could use this function to restore default stop words\n *\n * @return {null}\n */\nelasticlunr.resetStopWords = function () {\n elasticlunr.stopWordFilter.stopWords = elasticlunr.defaultStopWords;\n};\n\nelasticlunr.defaultStopWords = {\n \"\": true,\n \"a\": true,\n \"able\": true,\n \"about\": true,\n \"across\": true,\n \"after\": true,\n \"all\": true,\n \"almost\": true,\n \"also\": true,\n \"am\": true,\n \"among\": true,\n \"an\": true,\n \"and\": true,\n \"any\": true,\n \"are\": true,\n \"as\": true,\n \"at\": true,\n \"be\": true,\n \"because\": true,\n \"been\": true,\n \"but\": true,\n \"by\": true,\n \"can\": true,\n \"cannot\": true,\n \"could\": true,\n \"dear\": true,\n \"did\": true,\n \"do\": true,\n \"does\": true,\n \"either\": true,\n \"else\": true,\n \"ever\": true,\n \"every\": true,\n \"for\": true,\n \"from\": true,\n \"get\": true,\n \"got\": true,\n \"had\": true,\n \"has\": true,\n \"have\": true,\n \"he\": true,\n \"her\": true,\n \"hers\": true,\n \"him\": true,\n \"his\": true,\n \"how\": true,\n \"however\": true,\n \"i\": true,\n \"if\": true,\n \"in\": true,\n \"into\": true,\n \"is\": true,\n \"it\": true,\n \"its\": true,\n \"just\": true,\n \"least\": true,\n \"let\": true,\n \"like\": true,\n \"likely\": true,\n \"may\": true,\n \"me\": true,\n \"might\": true,\n \"most\": true,\n \"must\": true,\n \"my\": true,\n \"neither\": true,\n \"no\": true,\n \"nor\": true,\n \"not\": true,\n \"of\": true,\n 
\"off\": true,\n \"often\": true,\n \"on\": true,\n \"only\": true,\n \"or\": true,\n \"other\": true,\n \"our\": true,\n \"own\": true,\n \"rather\": true,\n \"said\": true,\n \"say\": true,\n \"says\": true,\n \"she\": true,\n \"should\": true,\n \"since\": true,\n \"so\": true,\n \"some\": true,\n \"than\": true,\n \"that\": true,\n \"the\": true,\n \"their\": true,\n \"them\": true,\n \"then\": true,\n \"there\": true,\n \"these\": true,\n \"they\": true,\n \"this\": true,\n \"tis\": true,\n \"to\": true,\n \"too\": true,\n \"twas\": true,\n \"us\": true,\n \"wants\": true,\n \"was\": true,\n \"we\": true,\n \"were\": true,\n \"what\": true,\n \"when\": true,\n \"where\": true,\n \"which\": true,\n \"while\": true,\n \"who\": true,\n \"whom\": true,\n \"why\": true,\n \"will\": true,\n \"with\": true,\n \"would\": true,\n \"yet\": true,\n \"you\": true,\n \"your\": true\n};\n\nelasticlunr.stopWordFilter.stopWords = elasticlunr.defaultStopWords;\n\nelasticlunr.Pipeline.registerFunction(elasticlunr.stopWordFilter, 'stopWordFilter');\n/*!\n * elasticlunr.trimmer\n * Copyright (C) 2016 Oliver Nightingale\n * Copyright (C) 2016 Wei Song\n */\n\n/**\n * elasticlunr.trimmer is a pipeline function for trimming non word\n * characters from the begining and end of tokens before they\n * enter the index.\n *\n * This implementation may not work correctly for non latin\n * characters and should either be removed or adapted for use\n * with languages with non-latin characters.\n *\n * @module\n * @param {String} token The token to pass through the filter\n * @return {String}\n * @see elasticlunr.Pipeline\n */\nelasticlunr.trimmer = function (token) {\n if (token === null || token === undefined) {\n throw new Error('token should not be undefined');\n }\n\n return token\n .replace(/^\\W+/, '')\n .replace(/\\W+$/, '');\n};\n\nelasticlunr.Pipeline.registerFunction(elasticlunr.trimmer, 'trimmer');\n/*!\n * elasticlunr.InvertedIndex\n * Copyright (C) 2016 Wei Song\n * Includes code from - http://tartarus.org/~martin/PorterStemmer/js.txt\n */\n\n/**\n * elasticlunr.InvertedIndex is used for efficiently storing and\n * lookup of documents that contain a given token.\n *\n * @constructor\n */\nelasticlunr.InvertedIndex = function () {\n this.root = { docs: {}, df: 0 };\n};\n\n/**\n * Loads a previously serialised inverted index.\n *\n * @param {Object} serialisedData The serialised inverted index to load.\n * @return {elasticlunr.InvertedIndex}\n */\nelasticlunr.InvertedIndex.load = function (serialisedData) {\n var idx = new this;\n idx.root = serialisedData.root;\n\n return idx;\n};\n\n/**\n * Adds a {token: tokenInfo} pair to the inverted index.\n * If the token already exist, then update the tokenInfo.\n *\n * tokenInfo format: { ref: 1, tf: 2}\n * tokenInfor should contains the document's ref and the tf(token frequency) of that token in\n * the document.\n *\n * By default this function starts at the root of the current inverted index, however\n * it can start at any node of the inverted index if required.\n *\n * @param {String} token \n * @param {Object} tokenInfo format: { ref: 1, tf: 2}\n * @param {Object} root An optional node at which to start looking for the\n * correct place to enter the doc, by default the root of this elasticlunr.InvertedIndex\n * is used.\n * @memberOf InvertedIndex\n */\nelasticlunr.InvertedIndex.prototype.addToken = function (token, tokenInfo, root) {\n var root = root || this.root,\n idx = 0;\n\n while (idx <= token.length - 1) {\n var key = token[idx];\n\n if (!(key in 
root)) root[key] = {docs: {}, df: 0};\n idx += 1;\n root = root[key];\n }\n\n var docRef = tokenInfo.ref;\n if (!root.docs[docRef]) {\n // if this doc not exist, then add this doc\n root.docs[docRef] = {tf: tokenInfo.tf};\n root.df += 1;\n } else {\n // if this doc already exist, then update tokenInfo\n root.docs[docRef] = {tf: tokenInfo.tf};\n }\n};\n\n/**\n * Checks whether a token is in this elasticlunr.InvertedIndex.\n * \n *\n * @param {String} token The token to be checked\n * @return {Boolean}\n * @memberOf InvertedIndex\n */\nelasticlunr.InvertedIndex.prototype.hasToken = function (token) {\n if (!token) return false;\n\n var node = this.root;\n\n for (var i = 0; i < token.length; i++) {\n if (!node[token[i]]) return false;\n node = node[token[i]];\n }\n\n return true;\n};\n\n/**\n * Retrieve a node from the inverted index for a given token.\n * If token not found in this InvertedIndex, return null.\n * \n *\n * @param {String} token The token to get the node for.\n * @return {Object}\n * @see InvertedIndex.prototype.get\n * @memberOf InvertedIndex\n */\nelasticlunr.InvertedIndex.prototype.getNode = function (token) {\n if (!token) return null;\n\n var node = this.root;\n\n for (var i = 0; i < token.length; i++) {\n if (!node[token[i]]) return null;\n node = node[token[i]];\n }\n\n return node;\n};\n\n/**\n * Retrieve the documents of a given token.\n * If token not found, return {}.\n *\n *\n * @param {String} token The token to get the documents for.\n * @return {Object}\n * @memberOf InvertedIndex\n */\nelasticlunr.InvertedIndex.prototype.getDocs = function (token) {\n var node = this.getNode(token);\n if (node == null) {\n return {};\n }\n\n return node.docs;\n};\n\n/**\n * Retrieve term frequency of given token in given docRef.\n * If token or docRef not found, return 0.\n *\n *\n * @param {String} token The token to get the documents for.\n * @param {String|Integer} docRef\n * @return {Integer}\n * @memberOf InvertedIndex\n */\nelasticlunr.InvertedIndex.prototype.getTermFrequency = function (token, docRef) {\n var node = this.getNode(token);\n\n if (node == null) {\n return 0;\n }\n\n if (!(docRef in node.docs)) {\n return 0;\n }\n\n return node.docs[docRef].tf;\n};\n\n/**\n * Retrieve the document frequency of given token.\n * If token not found, return 0.\n *\n *\n * @param {String} token The token to get the documents for.\n * @return {Object}\n * @memberOf InvertedIndex\n */\nelasticlunr.InvertedIndex.prototype.getDocFreq = function (token) {\n var node = this.getNode(token);\n\n if (node == null) {\n return 0;\n }\n\n return node.df;\n};\n\n/**\n * Remove the document identified by document's ref from the token in the inverted index.\n *\n *\n * @param {String} token Remove the document from which token.\n * @param {String} ref The ref of the document to remove from given token.\n * @memberOf InvertedIndex\n */\nelasticlunr.InvertedIndex.prototype.removeToken = function (token, ref) {\n if (!token) return;\n var node = this.getNode(token);\n\n if (node == null) return;\n\n if (ref in node.docs) {\n delete node.docs[ref];\n node.df -= 1;\n }\n};\n\n/**\n * Find all the possible suffixes of given token using tokens currently in the inverted index.\n * If token not found, return empty Array.\n *\n * @param {String} token The token to expand.\n * @return {Array}\n * @memberOf InvertedIndex\n */\nelasticlunr.InvertedIndex.prototype.expandToken = function (token, memo, root) {\n if (token == null || token == '') return [];\n var memo = memo || [];\n\n if (root == void 0) {\n 
root = this.getNode(token);\n if (root == null) return memo;\n }\n\n if (root.df > 0) memo.push(token);\n\n for (var key in root) {\n if (key === 'docs') continue;\n if (key === 'df') continue;\n this.expandToken(token + key, memo, root[key]);\n }\n\n return memo;\n};\n\n/**\n * Returns a representation of the inverted index ready for serialisation.\n *\n * @return {Object}\n * @memberOf InvertedIndex\n */\nelasticlunr.InvertedIndex.prototype.toJSON = function () {\n return {\n root: this.root\n };\n};\n\n/*!\n * elasticlunr.Configuration\n * Copyright (C) 2016 Wei Song\n */\n \n /** \n * elasticlunr.Configuration is used to analyze the user search configuration.\n * \n * By elasticlunr.Configuration user could set query-time boosting, boolean model in each field.\n * \n * Currently configuration supports:\n * 1. query-time boosting, user could set how to boost each field.\n * 2. boolean model chosing, user could choose which boolean model to use for each field.\n * 3. token expandation, user could set token expand to True to improve Recall. Default is False.\n * \n * Query time boosting must be configured by field category, \"boolean\" model could be configured \n * by both field category or globally as the following example. Field configuration for \"boolean\"\n * will overwrite global configuration.\n * Token expand could be configured both by field category or golbally. Local field configuration will\n * overwrite global configuration.\n * \n * configuration example:\n * {\n * fields:{ \n * title: {boost: 2},\n * body: {boost: 1}\n * },\n * bool: \"OR\"\n * }\n * \n * \"bool\" field configuation overwrite global configuation example:\n * {\n * fields:{ \n * title: {boost: 2, bool: \"AND\"},\n * body: {boost: 1}\n * },\n * bool: \"OR\"\n * }\n * \n * \"expand\" example:\n * {\n * fields:{ \n * title: {boost: 2, bool: \"AND\"},\n * body: {boost: 1}\n * },\n * bool: \"OR\",\n * expand: true\n * }\n * \n * \"expand\" example for field category:\n * {\n * fields:{ \n * title: {boost: 2, bool: \"AND\", expand: true},\n * body: {boost: 1}\n * },\n * bool: \"OR\"\n * }\n * \n * setting the boost to 0 ignores the field (this will only search the title):\n * {\n * fields:{\n * title: {boost: 1},\n * body: {boost: 0}\n * }\n * }\n *\n * then, user could search with configuration to do query-time boosting.\n * idx.search('oracle database', {fields: {title: {boost: 2}, body: {boost: 1}}});\n * \n * \n * @constructor\n * \n * @param {String} config user configuration\n * @param {Array} fields fields of index instance\n * @module\n */\nelasticlunr.Configuration = function (config, fields) {\n var config = config || '';\n\n if (fields == undefined || fields == null) {\n throw new Error('fields should not be null');\n }\n\n this.config = {};\n\n var userConfig;\n try {\n userConfig = JSON.parse(config);\n this.buildUserConfig(userConfig, fields);\n } catch (error) {\n elasticlunr.utils.warn('user configuration parse failed, will use default configuration');\n this.buildDefaultConfig(fields);\n }\n};\n\n/**\n * Build default search configuration.\n * \n * @param {Array} fields fields of index instance\n */\nelasticlunr.Configuration.prototype.buildDefaultConfig = function (fields) {\n this.reset();\n fields.forEach(function (field) {\n this.config[field] = {\n boost: 1,\n bool: \"OR\",\n expand: false\n };\n }, this);\n};\n\n/**\n * Build user configuration.\n * \n * @param {JSON} config User JSON configuratoin\n * @param {Array} fields fields of index instance\n 
*/\nelasticlunr.Configuration.prototype.buildUserConfig = function (config, fields) {\n var global_bool = \"OR\";\n var global_expand = false;\n\n this.reset();\n if ('bool' in config) {\n global_bool = config['bool'] || global_bool;\n }\n\n if ('expand' in config) {\n global_expand = config['expand'] || global_expand;\n }\n\n if ('fields' in config) {\n for (var field in config['fields']) {\n if (fields.indexOf(field) > -1) {\n var field_config = config['fields'][field];\n var field_expand = global_expand;\n if (field_config.expand != undefined) {\n field_expand = field_config.expand;\n }\n\n this.config[field] = {\n boost: (field_config.boost || field_config.boost === 0) ? field_config.boost : 1,\n bool: field_config.bool || global_bool,\n expand: field_expand\n };\n } else {\n elasticlunr.utils.warn('field name in user configuration not found in index instance fields');\n }\n }\n } else {\n this.addAllFields2UserConfig(global_bool, global_expand, fields);\n }\n};\n\n/**\n * Add all fields to user search configuration.\n * \n * @param {String} bool Boolean model\n * @param {String} expand Expand model\n * @param {Array} fields fields of index instance\n */\nelasticlunr.Configuration.prototype.addAllFields2UserConfig = function (bool, expand, fields) {\n fields.forEach(function (field) {\n this.config[field] = {\n boost: 1,\n bool: bool,\n expand: expand\n };\n }, this);\n};\n\n/**\n * get current user configuration\n */\nelasticlunr.Configuration.prototype.get = function () {\n return this.config;\n};\n\n/**\n * reset user search configuration.\n */\nelasticlunr.Configuration.prototype.reset = function () {\n this.config = {};\n};\n/**\n * sorted_set.js is added only to make elasticlunr.js compatible with lunr-languages.\n * if elasticlunr.js support different languages by default, this will make elasticlunr.js\n * much bigger that not good for browser usage.\n *\n */\n\n\n/*!\n * lunr.SortedSet\n * Copyright (C) 2016 Oliver Nightingale\n */\n\n/**\n * lunr.SortedSets are used to maintain an array of uniq values in a sorted\n * order.\n *\n * @constructor\n */\nlunr.SortedSet = function () {\n this.length = 0\n this.elements = []\n}\n\n/**\n * Loads a previously serialised sorted set.\n *\n * @param {Array} serialisedData The serialised set to load.\n * @returns {lunr.SortedSet}\n * @memberOf SortedSet\n */\nlunr.SortedSet.load = function (serialisedData) {\n var set = new this\n\n set.elements = serialisedData\n set.length = serialisedData.length\n\n return set\n}\n\n/**\n * Inserts new items into the set in the correct position to maintain the\n * order.\n *\n * @param {Object} The objects to add to this set.\n * @memberOf SortedSet\n */\nlunr.SortedSet.prototype.add = function () {\n var i, element\n\n for (i = 0; i < arguments.length; i++) {\n element = arguments[i]\n if (~this.indexOf(element)) continue\n this.elements.splice(this.locationFor(element), 0, element)\n }\n\n this.length = this.elements.length\n}\n\n/**\n * Converts this sorted set into an array.\n *\n * @returns {Array}\n * @memberOf SortedSet\n */\nlunr.SortedSet.prototype.toArray = function () {\n return this.elements.slice()\n}\n\n/**\n * Creates a new array with the results of calling a provided function on every\n * element in this sorted set.\n *\n * Delegates to Array.prototype.map and has the same signature.\n *\n * @param {Function} fn The function that is called on each element of the\n * set.\n * @param {Object} ctx An optional object that can be used as the context\n * for the function fn.\n * @returns 
{Array}\n * @memberOf SortedSet\n */\nlunr.SortedSet.prototype.map = function (fn, ctx) {\n return this.elements.map(fn, ctx)\n}\n\n/**\n * Executes a provided function once per sorted set element.\n *\n * Delegates to Array.prototype.forEach and has the same signature.\n *\n * @param {Function} fn The function that is called on each element of the\n * set.\n * @param {Object} ctx An optional object that can be used as the context\n * @memberOf SortedSet\n * for the function fn.\n */\nlunr.SortedSet.prototype.forEach = function (fn, ctx) {\n return this.elements.forEach(fn, ctx)\n}\n\n/**\n * Returns the index at which a given element can be found in the\n * sorted set, or -1 if it is not present.\n *\n * @param {Object} elem The object to locate in the sorted set.\n * @returns {Number}\n * @memberOf SortedSet\n */\nlunr.SortedSet.prototype.indexOf = function (elem) {\n var start = 0,\n end = this.elements.length,\n sectionLength = end - start,\n pivot = start + Math.floor(sectionLength / 2),\n pivotElem = this.elements[pivot]\n\n while (sectionLength > 1) {\n if (pivotElem === elem) return pivot\n\n if (pivotElem < elem) start = pivot\n if (pivotElem > elem) end = pivot\n\n sectionLength = end - start\n pivot = start + Math.floor(sectionLength / 2)\n pivotElem = this.elements[pivot]\n }\n\n if (pivotElem === elem) return pivot\n\n return -1\n}\n\n/**\n * Returns the position within the sorted set that an element should be\n * inserted at to maintain the current order of the set.\n *\n * This function assumes that the element to search for does not already exist\n * in the sorted set.\n *\n * @param {Object} elem The elem to find the position for in the set\n * @returns {Number}\n * @memberOf SortedSet\n */\nlunr.SortedSet.prototype.locationFor = function (elem) {\n var start = 0,\n end = this.elements.length,\n sectionLength = end - start,\n pivot = start + Math.floor(sectionLength / 2),\n pivotElem = this.elements[pivot]\n\n while (sectionLength > 1) {\n if (pivotElem < elem) start = pivot\n if (pivotElem > elem) end = pivot\n\n sectionLength = end - start\n pivot = start + Math.floor(sectionLength / 2)\n pivotElem = this.elements[pivot]\n }\n\n if (pivotElem > elem) return pivot\n if (pivotElem < elem) return pivot + 1\n}\n\n/**\n * Creates a new lunr.SortedSet that contains the elements in the intersection\n * of this set and the passed set.\n *\n * @param {lunr.SortedSet} otherSet The set to intersect with this set.\n * @returns {lunr.SortedSet}\n * @memberOf SortedSet\n */\nlunr.SortedSet.prototype.intersect = function (otherSet) {\n var intersectSet = new lunr.SortedSet,\n i = 0, j = 0,\n a_len = this.length, b_len = otherSet.length,\n a = this.elements, b = otherSet.elements\n\n while (true) {\n if (i > a_len - 1 || j > b_len - 1) break\n\n if (a[i] === b[j]) {\n intersectSet.add(a[i])\n i++, j++\n continue\n }\n\n if (a[i] < b[j]) {\n i++\n continue\n }\n\n if (a[i] > b[j]) {\n j++\n continue\n }\n };\n\n return intersectSet\n}\n\n/**\n * Makes a copy of this set\n *\n * @returns {lunr.SortedSet}\n * @memberOf SortedSet\n */\nlunr.SortedSet.prototype.clone = function () {\n var clone = new lunr.SortedSet\n\n clone.elements = this.toArray()\n clone.length = clone.elements.length\n\n return clone\n}\n\n/**\n * Creates a new lunr.SortedSet that contains the elements in the union\n * of this set and the passed set.\n *\n * @param {lunr.SortedSet} otherSet The set to union with this set.\n * @returns {lunr.SortedSet}\n * @memberOf SortedSet\n */\nlunr.SortedSet.prototype.union = 
function (otherSet) {\n var longSet, shortSet, unionSet\n\n if (this.length >= otherSet.length) {\n longSet = this, shortSet = otherSet\n } else {\n longSet = otherSet, shortSet = this\n }\n\n unionSet = longSet.clone()\n\n for(var i = 0, shortSetElements = shortSet.toArray(); i < shortSetElements.length; i++){\n unionSet.add(shortSetElements[i])\n }\n\n return unionSet\n}\n\n/**\n * Returns a representation of the sorted set ready for serialisation.\n *\n * @returns {Array}\n * @memberOf SortedSet\n */\nlunr.SortedSet.prototype.toJSON = function () {\n return this.toArray()\n}\n /**\n * export the module via AMD, CommonJS or as a browser global\n * Export code from https://github.com/umdjs/umd/blob/master/returnExports.js\n */\n ;(function (root, factory) {\n if (typeof define === 'function' && define.amd) {\n // AMD. Register as an anonymous module.\n define(factory)\n } else if (typeof exports === 'object') {\n /**\n * Node. Does not work with strict CommonJS, but\n * only CommonJS-like enviroments that support module.exports,\n * like Node.\n */\n module.exports = factory()\n } else {\n // Browser globals (root is window)\n root.elasticlunr = factory()\n }\n }(this, function () {\n /**\n * Just return a value to define the module export.\n * This example returns an object, but the module\n * can return a function as the exported value.\n */\n return elasticlunr\n }))\n})();\n"],"sourceRoot":""}