aboutsummaryrefslogtreecommitdiffstats
path: root/background_scripts/search_engines.coffee
blob: 5d69d087bf614ee0ba56e5d519d489ed7c971cc7 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
# Each completer implements three functions:
#
#   match:  can this completer be used for this search URL?
#   getUrl: map these query terms to a completion URL.
#   parse:  extract suggestions from the resulting (successful) XMLHttpRequest.
#
Google =
  name: "Google"
  # Whether this completer can serve searchUrl; for now it claims everything.
  match: (searchUrl) ->
    true # TBD.

  # Build the Google-suggest completion URL for the given query terms.
  getUrl: (queryTerms) ->
    "http://suggestqueries.google.com/complete/search?ss_protocol=legace&client=toolbar&q=#{Utils.createSearchQuery queryTerms}"

  # Collect the "data" attribute of each <suggestion> element in the XML response, skipping any
  # element without one, and hand the resulting list of strings to callback.  If the response has
  # no usable XML, callback receives an empty list.
  parse: (xhr, callback) ->
    elements = xhr?.responseXML?.getElementsByTagName "suggestion"
    unless elements
      callback []
      return
    results = []
    for element in elements
      data = element.getAttribute "data"
      results.push data if data
    callback results

# A dummy search engine which is guaranteed to match any search URL, but produces no completions.  This allows
# the rest of the logic to be written knowing that there will be a search engine match.
DummySearchEngine =
  name: "Dummy"
  # Always matches, making this the catch-all fallback engine.
  match: -> true
  # We return a useless URL which we know will succeed, but which won't generate any network traffic.
  getUrl: -> chrome.runtime.getURL "content_scripts/vimium.css"
  # Never produces suggestions: callback always receives an empty list.
  parse: (_, callback) -> callback []

# Engines are tried in order; DummySearchEngine matches every URL, so it must remain last.
CompletionEngines = [ Google, DummySearchEngine ]

SearchEngines =
  # Abort any outstanding request for searchUrl and forget it.  If callback is provided, it is
  # invoked with null (the "no result" signal used throughout this file).
  cancel: (searchUrl, callback = null) ->
    @requests ?= {} # Guard: cancel may be called before any get() has initialized this map.
    @requests[searchUrl]?.abort()
    delete @requests[searchUrl]
    callback? null

  # Perform an HTTP GET.
  #   searchUrl is the search engine's URL, e.g. Settings.get("searchUrl")
  #   url is the URL to fetch
  #   callback will be called with a successful XMLHttpRequest object, or null.
  get: (searchUrl, url, callback) ->
    @requests ?= {} # Maps searchUrls to any outstanding HTTP request for that search engine.
    @cancel searchUrl

    # We cache the results of recent requests (with a two-hour expiry).
    @requestCache ?= new SimpleCache 2 * 60 * 60 * 1000

    if @requestCache.has url
      callback @requestCache.get url
      return

    @requests[searchUrl] = xhr = new XMLHttpRequest()

    # On error/timeout, the XHR spec fires readystatechange (readyState 4, non-200 status) in
    # addition to onerror/ontimeout, so without this guard callback could be invoked twice for a
    # single request.  done() ensures callback runs at most once.
    completed = false
    done = (result) ->
      unless completed
        completed = true
        callback result

    xhr.open "GET", url, true
    xhr.timeout = 500
    xhr.ontimeout = => @cancel searchUrl, done
    xhr.onerror = => @cancel searchUrl, done
    # Install the completion handler before send(), so no state change can be missed.
    xhr.onreadystatechange = =>
      if xhr.readyState == 4
        if xhr.status == 200
          # The request is finished; drop it so a later cancel() cannot abort a completed request.
          delete @requests[searchUrl]
          done @requestCache.set url, xhr
        else
          done null
    xhr.send()

  # Look up the search engine for this search URL.  Because of DummySearchEngine, above, we know there will
  # always be a match.  Imagining that there may be many search engines, and knowing that this is called for
  # every character entered, we cache the result.
  lookupEngine: (searchUrl) ->
    @engineCache ?= new SimpleCache 24 * 60 * 60 * 1000
    if @engineCache.has searchUrl
      @engineCache.get searchUrl
    else
      for engine in CompletionEngines
        return @engineCache.set searchUrl, engine if engine.match searchUrl

  # This is the main (actually, the only) entry point.
  #   searchUrl is the search engine's URL, e.g. Settings.get("searchUrl")
  #   queryTerms are the queryTerms
  #   callback will be applied to a list of suggestion strings (which will be an empty list, if anything goes
  #   wrong).
  complete: (searchUrl, queryTerms, callback) ->
    return callback [] unless 0 < queryTerms.length

    # Don't try to complete general URLs.
    return callback [] if 1 == queryTerms.length and Utils.isUrl queryTerms[0]

    # Don't try to complete Javascript URLs.  (queryTerms is non-empty here, per the guard above.)
    return callback [] if Utils.hasJavascriptPrefix queryTerms[0]

    engine = @lookupEngine searchUrl
    url = engine.getUrl queryTerms
    @get searchUrl, url, (xhr = null) ->
      if xhr? then engine.parse xhr, callback else callback []

# Export SearchEngines on whichever global scope exists: exports (tests/Node) or window (browser).
root = exports ? window
root.SearchEngines = SearchEngines