author     Stephen Blott  2015-05-02 12:39:50 +0100
committer  Stephen Blott  2015-05-02 14:35:49 +0100
commit     a855cc15393fbf6296ac1ecf278c5f1b736c81b9 (patch)
tree       8305635060d096e1c58129728c528ceca0f53336
parent     41495d11e6608767dde299223f10c8a606d4a8fb (diff)
download   vimium-a855cc15393fbf6296ac1ecf278c5f1b736c81b9.tar.bz2
Search completion; cache at a higher level.
... and tweak caching constants.
-rw-r--r--  background_scripts/search_engines.coffee  42
1 file changed, 25 insertions, 17 deletions
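
The diff below replaces the request-level cache flagged in the removed FIXME (whole XMLHttpRequest objects) with a completion-level cache of parsed suggestion strings. A rough CoffeeScript sketch of the idea follows; SimpleCache, SearchEngines and the engine objects are assumed to behave as the call sites in this diff imply, and completeSketch is a hypothetical name for illustration, not the literal Vimium code:

# Sketch: cache parsed suggestions (strings) rather than XMLHttpRequest objects.
completionCache = new SimpleCache 6 * 60 * 60 * 1000, 2000  # Six-hour expiry, 2000 entries.

completeSketch = (searchUrl, queryTerms, callback) ->
  # The key depends on both the search URL and the query terms.  An unlikely
  # separator keeps, say, "http://a" with ["bc"] from colliding with "http://ab" with ["c"].
  junk = "//Zi?ei5;o//"
  key = searchUrl + junk + queryTerms.join junk
  return callback completionCache.get key if completionCache.has key
  engine = SearchEngines.lookupEngine searchUrl
  SearchEngines.get searchUrl, (engine.getUrl queryTerms), (xhr = null) ->
    if xhr?
      # set returns the value it stores, so it can be passed straight to the callback.
      callback completionCache.set key, engine.parse xhr
    else
      callback []
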
diff --git a/background_scripts/search_engines.coffee b/background_scripts/search_engines.coffee
index 3dfea180..f07a9df4 100644
--- a/background_scripts/search_engines.coffee
+++ b/background_scripts/search_engines.coffee
@@ -17,15 +17,13 @@ class Google
   getUrl: (queryTerms) ->
     "http://suggestqueries.google.com/complete/search?ss_protocol=legace&client=toolbar&q=#{Utils.createSearchQuery queryTerms}"
 
-  parse: (xhr, callback) ->
-    if suggestions = xhr?.responseXML?.getElementsByTagName "suggestion"
-      suggestions =
-        for suggestion in suggestions
-          continue unless suggestion = suggestion.getAttribute "data"
-          suggestion
-      callback suggestions
-    else
-      callback []
+  # Returns a list of suggestions (strings).
+  parse: (xhr) ->
+    suggestions = xhr?.responseXML?.getElementsByTagName "suggestion"
+    return [] unless suggestions
+    for suggestion in suggestions
+      continue unless suggestion = suggestion.getAttribute "data"
+      suggestion
 
 # A dummy search engine which is guaranteed to match any search URL, but never produces completions. This
 # allows the rest of the logic to be written knowing that there will be a search engine match.
@@ -35,7 +33,7 @@ class DummySearchEngine
   match: -> true
   # We return a useless URL which we know will succeed, but which won't generate any network traffic.
   getUrl: -> chrome.runtime.getURL "content_scripts/vimium.css"
-  parse: (_, callback) -> callback []
+  parse: -> []
 
 completionEngines = [ Google, DummySearchEngine ]
 
@@ -53,10 +51,8 @@ SearchEngines =
     @requests ?= {} # Maps searchUrls to any outstanding HTTP request for that search engine.
     @cancel searchUrl
 
-    # We cache the results of the most-recent 1000 requests (with a two-hour expiry).
-    # FIXME(smblott) Currently we're caching XMLHttpRequest objects, which is wasteful of memory. It would be
-    # better to handle caching at a higher level.
-    @requestCache ?= new SimpleCache 2 * 60 * 60 * 1000, 1000
+    # We cache the results of the most-recent 1000 requests with a one-minute expiry.
+    @requestCache ?= new SimpleCache 1 * 60 * 1000, 1000
 
     if @requestCache.has url
       callback @requestCache.get url
@@ -81,7 +77,7 @@ SearchEngines =
   # there will always be a match. Imagining that there may be many search engines, and knowing that this is
   # called for every character entered, we cache the result.
   lookupEngine: (searchUrl) ->
-    @engineCache ?= new SimpleCache 24 * 60 * 60 * 1000
+    @engineCache ?= new SimpleCache 30 * 60 * 60 * 1000 # 30 hours (these are small, we can keep them longer).
     if @engineCache.has searchUrl
       @engineCache.get searchUrl
     else
@@ -103,10 +99,22 @@ SearchEngines =
     # Don't try to complete Javascript URLs.
     return callback [] if 0 < queryTerms.length and Utils.hasJavascriptPrefix queryTerms[0]
 
+    # Cache completions. However, completions depend upon both the searchUrl and the query terms. So we need
+    # to generate a key. We mix in some nonsense generated by pwgen. There is the possibility of a key clash,
+    # but it's vanishingly small.
+    junk = "//Zi?ei5;o//"
+    completionCacheKey = searchUrl + junk + queryTerms.join junk
+    @completionCache ?= new SimpleCache 6 * 60 * 60 * 1000, 2000 # Six hours, 2000 entries.
+    if @completionCache.has completionCacheKey
+      return callback @completionCache.get completionCacheKey
+
     engine = @lookupEngine searchUrl
     url = engine.getUrl queryTerms
-    @get searchUrl, url, (xhr = null) ->
-      if xhr? then engine.parse xhr, callback else callback []
+    @get searchUrl, url, (xhr = null) =>
+      if xhr?
+        callback @completionCache.set completionCacheKey, engine.parse xhr
+      else
+        callback []
 
 root = exports ? window
 root.SearchEngines = SearchEngines
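
For reference, a minimal self-contained sketch of the cache interface implied by the call sites above: the constructor takes an expiry in milliseconds plus an entry limit, has/get test and read entries, and set returns the value it stores (which is why "callback @completionCache.set ..." works). This is an illustration only, under those assumptions; Vimium's actual SimpleCache may differ in detail.

# SketchCache: a hypothetical stand-in for the SimpleCache used in this commit.
class SketchCache
  constructor: (@expiry, @entries = 1000) ->
    @cache = {}

  has: (key) ->
    @cache[key]? and (new Date() - @cache[key].added) < @expiry

  get: (key) ->
    if @has key then @cache[key].value else null

  set: (key, value) ->
    # Evict the oldest entry when full (simplistic; the real eviction policy may differ).
    keys = Object.keys @cache
    if @entries <= keys.length
      oldest = keys.reduce (a, b) => if @cache[a].added <= @cache[b].added then a else b
      delete @cache[oldest]
    @cache[key] = { value, added: new Date() }
    value

# Example usage.
cache = new SketchCache 60 * 1000, 100       # One-minute expiry, at most 100 entries.
cache.set "vimium", ["vimium tutorial"]      # => ["vimium tutorial"]
cache.get "vimium" if cache.has "vimium"     # => ["vimium tutorial"] (until it expires)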