Update bloodhound.js lib
This commit is contained in:
parent dbfc401987
commit 594d653980
1 changed file with 50 additions and 16 deletions
resources/assets/js/lib/bloodhound.js (vendored): 66 changed lines
Normal file → Executable file
@@ -1,18 +1,18 @@
 /*!
- * typeahead.js 0.11.1
+ * typeahead.js 1.2.0
  * https://github.com/twitter/typeahead.js
- * Copyright 2013-2015 Twitter, Inc. and other contributors; Licensed MIT
+ * Copyright 2013-2017 Twitter, Inc. and other contributors; Licensed MIT
  */
 
 (function(root, factory) {
     if (typeof define === "function" && define.amd) {
-        define("bloodhound", [ "jquery" ], function(a0) {
+        define([ "jquery" ], function(a0) {
             return root["Bloodhound"] = factory(a0);
         });
     } else if (typeof exports === "object") {
         module.exports = factory(require("jquery"));
     } else {
-        root["Bloodhound"] = factory(jQuery);
+        root["Bloodhound"] = factory(root["jQuery"]);
     }
 })(this, function($) {
     var _ = function() {
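
The AMD branch now uses an anonymous define() instead of the hard-coded "bloodhound" module id, and the browser-global fallback reads jQuery off root rather than relying on a bare jQuery identifier. A minimal RequireJS sketch of what that means for loader configuration; the paths below are assumptions for illustration only:

require.config({
    paths: {
        jquery: "vendor/jquery",                  // hypothetical path
        bloodhound: "assets/js/lib/bloodhound"    // hypothetical path to this vendored file
    }
});
require([ "bloodhound" ], function(Bloodhound) {
    // the module id is now whatever path is configured, not the
    // hard-coded "bloodhound" name baked into the 0.11.1 define() call
    console.log(typeof Bloodhound); // "function"
});
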
@@ -148,18 +148,27 @@
             stringify: function(val) {
                 return _.isString(val) ? val : JSON.stringify(val);
             },
+            guid: function() {
+                function _p8(s) {
+                    var p = (Math.random().toString(16) + "000000000").substr(2, 8);
+                    return s ? "-" + p.substr(0, 4) + "-" + p.substr(4, 4) : p;
+                }
+                return "tt-" + _p8() + _p8(true) + _p8(true) + _p8();
+            },
             noop: function() {}
         };
     }();
-    var VERSION = "0.11.1";
+    var VERSION = "1.2.0";
     var tokenizers = function() {
         "use strict";
         return {
             nonword: nonword,
             whitespace: whitespace,
+            ngram: ngram,
             obj: {
                 nonword: getObjTokenizer(nonword),
-                whitespace: getObjTokenizer(whitespace)
+                whitespace: getObjTokenizer(whitespace),
+                ngram: getObjTokenizer(ngram)
             }
         };
         function whitespace(str) {
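
The utility belt gains a guid() helper that builds UUID-shaped, "tt-"-prefixed random ids out of Math.random() hex slices. A standalone sketch of the same logic, only to show the output shape (the printed value is random on every call):

function _p8(s) {
    var p = (Math.random().toString(16) + "000000000").substr(2, 8);
    return s ? "-" + p.substr(0, 4) + "-" + p.substr(4, 4) : p;
}
var id = "tt-" + _p8() + _p8(true) + _p8(true) + _p8();
console.log(id); // e.g. "tt-8a1f03c2-9b4d-77e1-c05a-2f6b19d4e8a3" (illustrative value)
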
@@ -170,6 +179,19 @@
             str = _.toStr(str);
             return str ? str.split(/\W+/) : [];
         }
+        function ngram(str) {
+            str = _.toStr(str);
+            var tokens = [], word = "";
+            _.each(str.split(""), function(char) {
+                if (char.match(/\s+/)) {
+                    word = "";
+                } else {
+                    tokens.push(word + char);
+                    word += char;
+                }
+            });
+            return tokens;
+        }
         function getObjTokenizer(tokenizer) {
             return function setKey(keys) {
                 keys = _.isArray(keys) ? keys : [].slice.call(arguments, 0);
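
The new ngram tokenizer emits every prefix of every whitespace-separated word, so a datum can be matched from the start of any word, one character at a time. A quick sketch, assuming this vendored file is loaded in the browser (after jQuery) so Bloodhound is available as a global:

var tokens = Bloodhound.tokenizers.ngram("big cat");
// => ["b", "bi", "big", "c", "ca", "cat"]

// the object form reads the same prefixes off a datum key ("name" is made up):
var byName = Bloodhound.tokenizers.obj.ngram("name");
byName({ name: "big cat" }); // => ["b", "bi", "big", "c", "ca", "cat"]
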
@@ -341,9 +363,10 @@
     }();
     var Transport = function() {
         "use strict";
-        var pendingRequestsCount = 0, pendingRequests = {}, maxPendingRequests = 6, sharedCache = new LruCache(10);
+        var pendingRequestsCount = 0, pendingRequests = {}, sharedCache = new LruCache(10);
         function Transport(o) {
             o = o || {};
+            this.maxPendingRequests = o.maxPendingRequests || 6;
             this.cancelled = false;
             this.lastReq = null;
             this._send = o.transport;
@@ -351,7 +374,7 @@
             this._cache = o.cache === false ? new LruCache(0) : sharedCache;
         }
         Transport.setMaxPendingRequests = function setMaxPendingRequests(num) {
-            maxPendingRequests = num;
+            this.maxPendingRequests = num;
         };
         Transport.resetCache = function resetCache() {
             sharedCache.reset();
@@ -369,7 +392,7 @@
                 }
                 if (jqXhr = pendingRequests[fingerprint]) {
                     jqXhr.done(done).fail(fail);
-                } else if (pendingRequestsCount < maxPendingRequests) {
+                } else if (pendingRequestsCount < this.maxPendingRequests) {
                     pendingRequestsCount++;
                     pendingRequests[fingerprint] = this._send(o).done(done).fail(fail).always(always);
                 } else {
@@ -423,6 +446,7 @@
             this.identify = o.identify || _.stringify;
             this.datumTokenizer = o.datumTokenizer;
             this.queryTokenizer = o.queryTokenizer;
+            this.matchAnyQueryToken = o.matchAnyQueryToken;
             this.reset();
         }
         _.mixin(SearchIndex.prototype, {
@@ -459,7 +483,7 @@
                 tokens = normalizeTokens(this.queryTokenizer(query));
                 _.each(tokens, function(token) {
                     var node, chars, ch, ids;
-                    if (matches && matches.length === 0) {
+                    if (matches && matches.length === 0 && !that.matchAnyQueryToken) {
                         return false;
                     }
                     node = that.trie;
@@ -471,8 +495,10 @@
                         ids = node[IDS].slice(0);
                         matches = matches ? getIntersection(matches, ids) : ids;
                     } else {
-                        matches = [];
-                        return false;
+                        if (!that.matchAnyQueryToken) {
+                            matches = [];
+                            return false;
+                        }
                     }
                 });
                 return matches ? _.map(unique(matches), function(id) {
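
Together with the change above in SearchIndex.prototype.search, a query token that finds nothing no longer wipes out the whole result set when matchAnyQueryToken is set. A sketch with made-up data, assuming the option is forwarded from the Bloodhound constructor to its SearchIndex (that plumbing is not shown in this hunk):

var engine = new Bloodhound({
    datumTokenizer: Bloodhound.tokenizers.whitespace,
    queryTokenizer: Bloodhound.tokenizers.whitespace,
    matchAnyQueryToken: true,
    local: [ "red apple", "green pear" ]
});
engine.search("red zzz", function(hits) {
    // the unmatched "zzz" token is ignored, so "red apple" still comes back;
    // with the old behaviour (and the new default of false) hits would be []
    console.log(hits);
});
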
@@ -614,10 +640,12 @@
             this.url = o.url;
             this.prepare = o.prepare;
             this.transform = o.transform;
+            this.indexResponse = o.indexResponse;
             this.transport = new Transport({
                 cache: o.cache,
                 limiter: o.limiter,
-                transport: o.transport
+                transport: o.transport,
+                maxPendingRequests: o.maxPendingRequests
             });
         }
         _.mixin(Remote.prototype, {
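
Remote now forwards a maxPendingRequests option to its Transport, so the former module-wide cap of 6 concurrent requests (removed in the Transport hunk above) can be tuned per dataset. A sketch; the endpoint and the value 2 are assumptions for illustration:

var engine = new Bloodhound({
    datumTokenizer: Bloodhound.tokenizers.whitespace,
    queryTokenizer: Bloodhound.tokenizers.whitespace,
    remote: {
        url: "/api/search?q=%QUERY",   // hypothetical endpoint
        wildcard: "%QUERY",
        maxPendingRequests: 2          // ends up on the per-instance Transport
    }
});
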
@@ -655,7 +683,9 @@
                 identify: _.stringify,
                 datumTokenizer: null,
                 queryTokenizer: null,
+                matchAnyQueryToken: false,
                 sufficient: 5,
+                indexRemote: false,
                 sorter: null,
                 local: [],
                 prefetch: null,
@@ -744,7 +774,7 @@
             } else if (o.wildcard) {
                 prepare = prepareByWildcard;
             } else {
-                prepare = idenityPrepare;
+                prepare = identityPrepare;
             }
             return prepare;
             function prepareByReplace(query, settings) {
@@ -755,7 +785,7 @@
                 settings.url = settings.url.replace(wildcard, encodeURIComponent(query));
                 return settings;
             }
-            function idenityPrepare(query, settings) {
+            function identityPrepare(query, settings) {
                 return settings;
             }
         }
@@ -806,6 +836,7 @@
         this.sorter = o.sorter;
         this.identify = o.identify;
         this.sufficient = o.sufficient;
+        this.indexRemote = o.indexRemote;
         this.local = o.local;
         this.remote = o.remote ? new Remote(o.remote) : null;
         this.prefetch = o.prefetch ? new Prefetch(o.prefetch) : null;
@@ -875,6 +906,8 @@
         },
         search: function search(query, sync, async) {
             var that = this, local;
+            sync = sync || _.noop;
+            async = async || _.noop;
             local = this.sorter(this.index.search(query));
             sync(this.remote ? local.slice() : local);
             if (this.remote && local.length < this.sufficient) {
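
Because sync and async now fall back to _.noop, both callbacks to search() are optional. A minimal sketch with made-up local data:

var engine = new Bloodhound({
    datumTokenizer: Bloodhound.tokenizers.whitespace,
    queryTokenizer: Bloodhound.tokenizers.whitespace,
    local: [ "alpha", "beta" ]
});
engine.search("al");                 // no callbacks: previously this threw, now it is a no-op
engine.search("al", function(hits) { // async callback omitted
    console.log(hits);               // => ["alpha"]
});
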
@@ -890,7 +923,8 @@
                     return that.identify(r) === that.identify(l);
                 }) && nonDuplicates.push(r);
             });
-            async && async(nonDuplicates);
+            that.indexRemote && that.add(nonDuplicates);
+            async(nonDuplicates);
         }
     },
     all: function all() {
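
With the new indexRemote option (default false, per the defaults hunk), datums returned by the remote endpoint are folded into the in-memory index once duplicates have been filtered out, so later searches can serve them synchronously. A sketch; the endpoint is an assumption for illustration:

var engine = new Bloodhound({
    datumTokenizer: Bloodhound.tokenizers.whitespace,
    queryTokenizer: Bloodhound.tokenizers.whitespace,
    indexRemote: true,
    remote: {
        url: "/api/search?q=%QUERY",   // hypothetical endpoint
        wildcard: "%QUERY"
    }
});
engine.search("pix",
    function(localHits)  { /* served from the in-memory index immediately */ },
    function(remoteHits) {
        // remoteHits (minus duplicates) were also added to the index,
        // so repeating the same search can now return them via localHits
    });
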