diff options
Diffstat (limited to 'background_scripts/search_engines.coffee')
| -rw-r--r-- | background_scripts/search_engines.coffee | 85 | 
1 file changed, 37 insertions, 48 deletions
| diff --git a/background_scripts/search_engines.coffee b/background_scripts/search_engines.coffee index abf8c86e..3ddbe742 100644 --- a/background_scripts/search_engines.coffee +++ b/background_scripts/search_engines.coffee @@ -114,38 +114,16 @@ completionEngines = [  ]  SearchEngines = -  cancel: (searchUrl, callback = null) -> -    @requests[searchUrl]?.abort() -    delete @requests[searchUrl] -    callback? null - -  # Perform an HTTP GET.    get: (searchUrl, url, callback) -> -    @requests ?= {} # Maps a searchUrl to any outstanding HTTP request for that search engine. -    @cancel searchUrl - -    # We cache the results of the most-recent 100 successfully XMLHttpRequests with a ten-second (ie. very -    # short) expiry. -    @requestCache ?= new SimpleCache 10 * 1000, 100 - -    if @requestCache.has url -      callback @requestCache.get url -      return - -    @requests[searchUrl] = xhr = new XMLHttpRequest() +    xhr = new XMLHttpRequest()      xhr.open "GET", url, true -    xhr.timeout = 750 -    xhr.ontimeout = => @cancel searchUrl, callback -    xhr.onerror = => @cancel searchUrl, callback +    xhr.timeout = 1000 +    xhr.ontimeout = xhr.onerror = -> callback null      xhr.send() -    xhr.onreadystatechange = => +    xhr.onreadystatechange = ->        if xhr.readyState == 4 -        @requests[searchUrl] = null -        if xhr.status == 200 -          callback @requestCache.set url, xhr -        else -          callback null +        callback(if xhr.status == 200 then xhr else null)    # Look up the search-completion engine for this searchUrl.  Because of DummySearchEngine, above, we know    # there will always be a match.  Imagining that there may be many completion engines, and knowing that this @@ -176,7 +154,7 @@ SearchEngines =      return callback [] if Utils.hasJavascriptPrefix queryTerms[0]      # Cache completions.  However, completions depend upon both the searchUrl and the query terms.  So we need -    # to generate a key.  
We mix in some nonsense generated by pwgen. A key clash is possible, but vanishingly +    # to generate a key.  We mix in some junk generated by pwgen. A key clash is possible, but vanishingly      # unlikely.      junk = "//Zi?ei5;o//"      completionCacheKey = searchUrl + junk + queryTerms.join junk @@ -184,26 +162,37 @@ SearchEngines =      if @completionCache.has completionCacheKey        return callback @completionCache.get completionCacheKey -    engine = @lookupEngine searchUrl -    url = engine.getUrl queryTerms -    query = queryTerms.join(" ").toLowerCase() -    @get searchUrl, url, (xhr = null) => -      # Parsing the response may fail if we receive an unexpected or an unexpectedly-formatted response.  In -      # all cases, we fall back to the catch clause, below. -      try -        suggestions = engine.parse xhr -        # Make sure we really do have an iterable of strings. -        suggestions = (suggestion for suggestion in suggestions when "string" == typeof suggestion) -        # Filter out the query itself. It's not adding anything. -        suggestions = (suggestion for suggestion in suggestions when suggestion.toLowerCase() != query) -        # We keep at most three suggestions, the top three. -        callback @completionCache.set completionCacheKey, suggestions[...3] -      catch -        callback @completionCache.set completionCacheKey, callback [] -        # We cache failures, but remove them after just ten minutes.  This (it is hoped) avoids repeated -        # XMLHttpRequest failures over a short period of time. -        removeCompletionCacheKey = => @completionCache.set completionCacheKey, null -        setTimeout removeCompletionCacheKey, 10 * 60 * 1000 # Ten minutes. 
+    fetchSuggestions = (callback) => +      engine = @lookupEngine searchUrl +      url = engine.getUrl queryTerms +      query = queryTerms.join(" ").toLowerCase() +      @get searchUrl, url, (xhr = null) => +        # Parsing the response may fail if we receive an unexpected or an unexpectedly-formatted response.  In +        # all cases, we fall back to the catch clause, below. +        try +          suggestions = engine.parse xhr +          # Make sure we really do have an iterable of strings. +          suggestions = (suggestion for suggestion in suggestions when "string" == typeof suggestion) +          # Filter out the query itself. It's not adding anything. +          suggestions = (suggestion for suggestion in suggestions when suggestion.toLowerCase() != query) +        catch +          suggestions = [] +          # We cache failures, but remove them after just ten minutes.  This (it is hoped) avoids repeated +          # XMLHttpRequest failures over a short period of time. +          removeCompletionCacheKey = => @completionCache.set completionCacheKey, null +          setTimeout removeCompletionCacheKey, 10 * 60 * 1000 # Ten minutes. + +        callback suggestions + +    # Don't allow duplicate identical active requests.  This can happen, for example, when the user enters or +    # removes a space, or when they enter a character and immediately delete it. +    @inTransit ?= {} +    unless @inTransit[completionCacheKey]?.push callback +      queue = @inTransit[completionCacheKey] = [] +      fetchSuggestions (suggestions) => +        callback @completionCache.set completionCacheKey, suggestions +        delete @inTransit[completionCacheKey] +        callback suggestions for callback in queue  root = exports ? window  root.SearchEngines = SearchEngines | 
