@@ -36,6 +36,10 @@ DATABASE_PASSWORD=""
 
 # Outgoing email settings. To use Gmail or Google Apps, put your Google Apps domain or gmail.com
 # as the SMTP_DOMAIN and your Gmail username and password as the SMTP_USER_NAME and SMTP_PASSWORD.
+#
+# PLEASE NOTE: To enable outgoing email locally (i.e., outside the production Rails environment),
+# you must also change config.action_mailer.perform_deliveries in config/environments/development.rb.
+
 SMTP_DOMAIN=your-domain-here.com
 SMTP_USER_NAME=you@gmail.com
 SMTP_PASSWORD=somepassword
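
A minimal sketch of the development.rb change the note above refers to (the `Huginn::Application.configure` wrapper is assumed; only the `perform_deliveries` setting comes from the note itself):

    # config/environments/development.rb
    Huginn::Application.configure do
      # Actually deliver mail in development instead of silently skipping delivery.
      config.action_mailer.perform_deliveries = true
    end
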
@@ -0,0 +1,118 @@
+require 'date'
+require 'cgi'
+module Agents
+  class PublicTransportAgent < Agent
+    cannot_receive_events!
+    description <<-MD
+      Specify the following user settings:
+
+      * stops (array)
+      * agency (string)
+      * alert_window_in_minutes (integer)
+
+      This Agent generates Events based on NextBus GPS transit predictions. First, select an agency by visiting [http://www.nextbus.com/predictor/agencySelector.jsp](http://www.nextbus.com/predictor/agencySelector.jsp) and finding your transit system. Once you find it, copy the part of the URL after `?a=`. For example, for the San Francisco MUNI system, you would end up on [http://www.nextbus.com/predictor/stopSelector.jsp?a=**sf-muni**](http://www.nextbus.com/predictor/stopSelector.jsp?a=sf-muni) and copy "sf-muni". Put that into this Agent's agency setting.
+
+      Next, find the stop tags that you care about. To find the tags for the sf-muni system, for the N route, visit this URL:
+      [http://webservices.nextbus.com/service/publicXMLFeed?command=routeConfig&a=sf-muni&r=**N**](http://webservices.nextbus.com/service/publicXMLFeed?command=routeConfig&a=sf-muni&r=N)
+
+      The tags are listed as tag="1234". Copy that number and add the route before it, separated by a pipe '|' symbol. Once you have one or more tags from that page, add them to this Agent's stop list. E.g.,
+
+          agency: "sf-muni"
+          stops: ["N|5221", "N|5215"]
+
+      This Agent will generate predictions by requesting a URL similar to the following:
+
+      [http://webservices.nextbus.com/service/publicXMLFeed?command=predictionsForMultiStops&a=sf-muni&stops=N|5221&stops=N|5215](http://webservices.nextbus.com/service/publicXMLFeed?command=predictionsForMultiStops&a=sf-muni&stops=N|5221&stops=N|5215)
+
+      Finally, set the arrival window that you're interested in, e.g., 5 minutes. Events will be created by the Agent any time a new train or bus comes into that time window.
+
+          alert_window_in_minutes: 5
+
+      Predictions are kept in the Agent's memory and are cleaned up once they are a couple of hours old, so that memory does not fill up.
+    MD
+
+
+    default_schedule "every_2m"
+
+    event_description <<-MD
+      Events look like this:
+        { "routeTitle":"N-Judah",
+          "stopTag":"5215",
+          "prediction":
+            {"epochTime":"1389622846689",
+             "seconds":"3454","minutes":"57","isDeparture":"false",
+             "affectedByLayover":"true","dirTag":"N__OB4KJU","vehicle":"1489",
+             "block":"9709","tripTag":"5840086"
+            }
+        }
+    MD
+
+    def check_url
+      stop_query = URI.encode(options["stops"].collect{|a| "&stops=#{a}"}.join)
+      "http://webservices.nextbus.com/service/publicXMLFeed?command=predictionsForMultiStops&a=#{options["agency"]}#{stop_query}"
+    end
+
+    def stops
+      options["stops"].collect{|a| a.split("|").last}
+    end
+    def check
+      hydra = Typhoeus::Hydra.new
+      request = Typhoeus::Request.new(check_url, :followlocation => true)
+      request.on_success do |response|
+        page = Nokogiri::XML response.body
+        predictions = page.css("//prediction")
+        predictions.each do |pr|
+          parent = pr.parent.parent
+          vals = {"routeTitle" => parent["routeTitle"], "stopTag" => parent["stopTag"]}
+          if pr["minutes"] && pr["minutes"].to_i < options["alert_window_in_minutes"].to_i
+            vals = vals.merge Hash.from_xml(pr.to_xml)
+            if not_already_in_memory?(vals)
+              create_event(:payload => vals)
+              log "creating event..."
+              update_memory(vals)
+            else
+              log "not creating event since already in memory"
+            end
+          end
+        end
+      end
+      hydra.queue request
+      hydra.run
+    end
+    def update_memory(vals)
+      add_to_memory(vals)
+      cleanup_old_memory
+    end
+    def cleanup_old_memory
+      self.memory["existing_routes"] ||= []
+      self.memory["existing_routes"].reject!{|h| h["currentTime"].to_time <= (Time.now - 2.hours)}
+    end
+    def add_to_memory(vals)
+      self.memory["existing_routes"] ||= []
+      self.memory["existing_routes"] << {"stopTag" => vals["stopTag"], "tripTag" => vals["prediction"]["tripTag"], "epochTime" => vals["prediction"]["epochTime"], "currentTime" => Time.now}
+    end
+    def not_already_in_memory?(vals)
+      m = self.memory["existing_routes"] || []
+      m.select{|h| h['stopTag'] == vals["stopTag"] &&
+                   h['tripTag'] == vals["prediction"]["tripTag"] &&
+                   h['epochTime'] == vals["prediction"]["epochTime"]
+               }.count == 0
+    end
+    def default_options
+      {
+        agency: "sf-muni",
+        stops: ["N|5221", "N|5215"],
+        alert_window_in_minutes: 5
+      }
+    end
+
+    def validate_options
+      errors.add(:base, 'agency is required') unless options['agency'].present?
+      errors.add(:base, 'alert_window_in_minutes is required') unless options['alert_window_in_minutes'].present?
+      errors.add(:base, 'stops are required') unless options['stops'].present?
+    end
+    def working?
+      event_created_within?(2) && !recent_error_logs?
+    end
+  end
+end
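
How the pieces above fit together, as an illustrative sketch (values come from the default options and the sample event; the real `check_url` additionally runs the query through `URI.encode`):

    # With the default options, check_url expands to the prediction URL from the description:
    options = { "agency" => "sf-muni", "stops" => ["N|5221", "N|5215"] }
    stop_query = options["stops"].collect { |s| "&stops=#{s}" }.join
    url = "http://webservices.nextbus.com/service/publicXMLFeed?command=predictionsForMultiStops&a=#{options["agency"]}#{stop_query}"
    # => "...command=predictionsForMultiStops&a=sf-muni&stops=N|5221&stops=N|5215"

    # Each created event also leaves a marker in memory, keyed on stop, trip, and
    # predicted arrival time, which not_already_in_memory? consults to avoid duplicates:
    marker = {
      "stopTag"     => "5215",
      "tripTag"     => "5840086",
      "epochTime"   => "1389622846689",
      "currentTime" => Time.now
    }
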
@@ -46,7 +46,7 @@ module Agents
     end
 
     def working?
-      event_created_within(options['expected_receive_period_in_days']) && !recent_error_logs?
+      event_created_within?(options['expected_receive_period_in_days']) && !recent_error_logs?
     end
 
     def validate_options
@@ -32,6 +32,8 @@ module Agents
 
       Note that for all of the formats, whatever you extract MUST have the same number of matches for each extractor. E.g., if you're extracting rows, all extractors must match all rows. For generating CSS selectors, something like [SelectorGadget](http://selectorgadget.com) may be helpful.
 
+      To use HTTP basic auth, set the `basic_auth` option to `username:password`.
+
       Set `expected_update_period_in_days` to the maximum amount of time that you'd expect to pass between Events being created by this Agent.
     MD
 
@@ -70,7 +72,11 @@ module Agents
     def check
       hydra = Typhoeus::Hydra.new
       log "Fetching #{options['url']}"
-      request = Typhoeus::Request.new(options['url'], :followlocation => true)
+      request_opts = {:followlocation => true}
+      if options['basic_auth'].present?
+        request_opts[:userpwd] = options['basic_auth']
+      end
+      request = Typhoeus::Request.new(options['url'], request_opts)
       request.on_failure do |response|
         error "Failed: #{response.inspect}"
       end
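
For reference, a sketch of how the new option would be configured (placeholder name, URL, and selector; the spec further below uses the same "user:pass" form). The value is handed to Typhoeus as `:userpwd`, which expects the same `username:password` string:

    # Illustrative WebsiteAgent options using the new basic_auth setting.
    site = {
      'name' => "Protected page",
      'expected_update_period_in_days' => 2,
      'type' => "html",
      'url' => "http://www.example.com/protected",
      'mode' => 'on_change',
      'extract' => { 'title' => {'css' => "h1", 'text' => "true"} },
      'basic_auth' => "user:pass"   # forwarded to Typhoeus as :userpwd
    }
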
@@ -47,20 +47,30 @@ class SwitchToJsonSerialization < ActiveRecord::Migration
       quoted_table_name = ActiveRecord::Base.connection.quote_table_name(table)
       fields = fields.map { |f| ActiveRecord::Base.connection.quote_column_name(f) }
 
-      rows = ActiveRecord::Base.connection.select_rows("SELECT id, #{fields.join(", ")} FROM #{quoted_table_name}")
-      rows.each do |row|
-        id, *field_data = row
+      page_start = 0
+      page_size = 1000
+      page_end = page_start + page_size
 
-        yaml_fields = field_data.map { |f| from.load(f) }.map { |f| to.dump(f) }
+      begin
+        rows = ActiveRecord::Base.connection.select_rows("SELECT id, #{fields.join(", ")} FROM #{quoted_table_name} WHERE id >= #{page_start} AND id < #{page_end}")
+        puts "Grabbing rows of #{table} from #{page_start} to #{page_end}"
+        rows.each do |row|
+          id, *field_data = row
 
-        yaml_fields.map! {|f| f.encode('utf-8', 'binary', invalid: :replace, undef: :replace, replace: '??') }
+          yaml_fields = field_data.map { |f| from.load(f) }.map { |f| to.dump(f) }
 
-        update_sql = "UPDATE #{quoted_table_name} SET #{fields.map {|f| "#{f}=?"}.join(", ")} WHERE id = ?"
+          yaml_fields.map! {|f| f.encode('utf-8', 'binary', invalid: :replace, undef: :replace, replace: '??') }
 
-        sanitized_update_sql = ActiveRecord::Base.send :sanitize_sql_array, [update_sql, *yaml_fields, id]
+          update_sql = "UPDATE #{quoted_table_name} SET #{fields.map {|f| "#{f}=?"}.join(", ")} WHERE id = ?"
 
-        ActiveRecord::Base.connection.execute sanitized_update_sql
-      end
+          sanitized_update_sql = ActiveRecord::Base.send :sanitize_sql_array, [update_sql, *yaml_fields, id]
+
+          ActiveRecord::Base.connection.execute sanitized_update_sql
+        end
+        page_start += page_size
+        page_end += page_size
+
+      end until rows.count == 0
     end
 
   end
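
The reworked migration walks each table in fixed id windows rather than loading every row at once; the pattern in isolation looks roughly like this (a sketch with an illustrative table name, not the migration itself):

    page_start = 0
    page_size  = 1000
    page_end   = page_start + page_size

    begin
      rows = ActiveRecord::Base.connection.select_rows(
        "SELECT id FROM some_table WHERE id >= #{page_start} AND id < #{page_end}"
      )
      rows.each do |id, *fields|
        # process each row here
      end
      # Advance the window; the loop ends on the first empty window,
      # which assumes ids do not contain gaps of page_size or more.
      page_start += page_size
      page_end   += page_size
    end until rows.count == 0
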
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<body copyright="All data copyright San Francisco Muni 2014.">
+<predictions agencyTitle="San Francisco Muni" routeTitle="N-Judah" routeTag="N" stopTitle="Judah St &amp; La Playa St" stopTag="5221">
+  <direction title="Outbound to Ocean Beach">
+    <prediction epochTime="1389707083293" seconds="1668" minutes="27" isDeparture="false" affectedByLayover="true" dirTag="N__OB3" vehicle="1443" block="9705" tripTag="5840326"/>
+    <prediction epochTime="1389708835605" seconds="3420" minutes="57" isDeparture="false" affectedByLayover="true" dirTag="N__OB3" vehicle="1518" block="9708" tripTag="5840327"/>
+    <prediction epochTime="1389709795605" seconds="4380" minutes="73" isDeparture="false" affectedByLayover="true" dirTag="N__OB3" vehicle="1404" block="9710" tripTag="5840328"/>
+  </direction>
+  <direction title="Outbound to Ocean Beach via Downtown">
+    <prediction epochTime="1389706393991" seconds="978" minutes="16" isDeparture="false" dirTag="N__OB4KJU" vehicle="1543" vehiclesInConsist="2" block="9703" tripTag="5840324"/>
+    <prediction epochTime="1389706512784" seconds="1097" minutes="18" isDeparture="false" dirTag="N__OB4KJU" vehicle="1476" vehiclesInConsist="2" block="9704" tripTag="5840083"/>
+    <prediction epochTime="1389707746994" seconds="2331" minutes="38" isDeparture="false" dirTag="N__OB4KJU" vehicle="1507" block="9706" tripTag="5840084"/>
+    <prediction epochTime="1389708458668" seconds="3043" minutes="50" isDeparture="false" affectedByLayover="true" dirTag="N__OB4KJU" vehicle="1489" block="9707" tripTag="5840085"/>
+    <prediction epochTime="1389709358668" seconds="3943" minutes="65" isDeparture="false" affectedByLayover="true" dirTag="N__OB4KJU" vehicle="1463" block="9709" tripTag="5840086"/>
+  </direction>
+<message text="No Elevator at
+Van Ness Station"/>
+</predictions>
+<predictions agencyTitle="San Francisco Muni" routeTitle="N-Judah" routeTag="N" stopTitle="Judah St &amp; 46th Ave" stopTag="5215">
+  <direction title="Outbound to Ocean Beach">
+    <prediction epochTime="1389706981164" seconds="1566" minutes="26" isDeparture="false" affectedByLayover="true" dirTag="N__OB3" vehicle="1443" block="9705" tripTag="5840326"/>
+    <prediction epochTime="1389708733476" seconds="3318" minutes="55" isDeparture="false" affectedByLayover="true" dirTag="N__OB3" vehicle="1518" block="9708" tripTag="5840327"/>
+    <prediction epochTime="1389709693476" seconds="4278" minutes="71" isDeparture="false" affectedByLayover="true" dirTag="N__OB3" vehicle="1404" block="9710" tripTag="5840328"/>
+  </direction>
+  <direction title="Outbound to Ocean Beach via Downtown">
+    <prediction epochTime="1389706282012" seconds="866" minutes="14" isDeparture="false" dirTag="N__OB4KJU" vehicle="1543" vehiclesInConsist="2" block="9703" tripTag="5840324"/>
+    <prediction epochTime="1389706400805" seconds="985" minutes="16" isDeparture="false" dirTag="N__OB4KJU" vehicle="1476" vehiclesInConsist="2" block="9704" tripTag="5840083"/>
+    <prediction epochTime="1389707635015" seconds="2219" minutes="36" isDeparture="false" dirTag="N__OB4KJU" vehicle="1507" block="9706" tripTag="5840084"/>
+    <prediction epochTime="1389708346689" seconds="2931" minutes="48" isDeparture="false" affectedByLayover="true" dirTag="N__OB4KJU" vehicle="1489" block="9707" tripTag="5840085"/>
+    <prediction epochTime="1389709246689" seconds="3831" minutes="63" isDeparture="false" affectedByLayover="true" dirTag="N__OB4KJU" vehicle="1463" block="9709" tripTag="5840086"/>
+  </direction>
+<message text="No Elevator at
+Van Ness Station"/>
+</predictions>
+</body>
@@ -0,0 +1,70 @@
+require 'spec_helper'
+describe Agents::PublicTransportAgent do
+  before do
+    valid_params = {
+      "name" => "sf muni agent",
+      "options" => {
+        "alert_window_in_minutes" => "20",
+        "stops" => ['N|5221', 'N|5215'],
+        "agency" => "sf-muni"
+      }
+    }
+    @agent = Agents::PublicTransportAgent.new(valid_params)
+    @agent.user = users(:bob)
+    @agent.save!
+  end
+
+  describe "#check" do
+    before do
+      stub_request(:get, "http://webservices.nextbus.com/service/publicXMLFeed?a=sf-muni&command=predictionsForMultiStops&stops=N%7C5215").
+        with(:headers => {'User-Agent'=>'Typhoeus - https://github.com/typhoeus/typhoeus'}).
+        to_return(:status => 200, :body => File.read(Rails.root.join("spec/data_fixtures/public_transport_agent.xml")), :headers => {})
+      stub(Time).now {"2014-01-14 20:21:30 +0500".to_time}
+    end
+
+    it "should create 4 events" do
+      lambda { @agent.check }.should change {@agent.events.count}.by(4)
+    end
+
+    it "should add 4 items to memory" do
+      @agent.memory.should == {}
+      @agent.check
+      @agent.memory.should == {"existing_routes" => [
+        {"stopTag"=>"5221", "tripTag"=>"5840324", "epochTime"=>"1389706393991", "currentTime"=>"2014-01-14 20:21:30 +0500"},
+        {"stopTag"=>"5221", "tripTag"=>"5840083", "epochTime"=>"1389706512784", "currentTime"=>"2014-01-14 20:21:30 +0500"},
+        {"stopTag"=>"5215", "tripTag"=>"5840324", "epochTime"=>"1389706282012", "currentTime"=>"2014-01-14 20:21:30 +0500"},
+        {"stopTag"=>"5215", "tripTag"=>"5840083", "epochTime"=>"1389706400805", "currentTime"=>"2014-01-14 20:21:30 +0500"}
+      ]
+      }
+    end
+
+    it "should not create events twice" do
+      lambda { @agent.check }.should change {@agent.events.count}.by(4)
+      lambda { @agent.check }.should_not change {@agent.events.count}
+    end
+
+    it "should reset memory after 2 hours" do
+      lambda { @agent.check }.should change {@agent.events.count}.by(4)
+      stub(Time).now {"2014-01-14 20:21:30 +0500".to_time + 3.hours}
+      @agent.cleanup_old_memory
+      lambda { @agent.check }.should change {@agent.events.count}.by(4)
+    end
+  end
+
+  describe "validation" do
+    it "should validate presence of stops" do
+      @agent.options['stops'] = nil
+      @agent.should_not be_valid
+    end
+
+    it "should validate presence of agency" do
+      @agent.options['agency'] = ""
+      @agent.should_not be_valid
+    end
+
+    it "should validate presence of alert_window_in_minutes" do
+      @agent.options['alert_window_in_minutes'] = ""
+      @agent.should_not be_valid
+    end
+  end
+end
@@ -1,189 +1,220 @@
 require 'spec_helper'
 
 describe Agents::WebsiteAgent do
-  before do
-    stub_request(:any, /xkcd/).to_return(:body => File.read(Rails.root.join("spec/data_fixtures/xkcd.html")), :status => 200)
-    @site = {
-      'name' => "XKCD",
-      'expected_update_period_in_days' => 2,
-      'type' => "html",
-      'url' => "http://xkcd.com",
-      'mode' => 'on_change',
-      'extract' => {
-        'url' => {'css' => "#comic img", 'attr' => "src"},
-        'title' => {'css' => "#comic img", 'attr' => "title"}
+  describe "checking without basic auth" do
+    before do
+      stub_request(:any, /xkcd/).to_return(:body => File.read(Rails.root.join("spec/data_fixtures/xkcd.html")), :status => 200)
+      @site = {
+        'name' => "XKCD",
+        'expected_update_period_in_days' => 2,
+        'type' => "html",
+        'url' => "http://xkcd.com",
+        'mode' => 'on_change',
+        'extract' => {
+          'url' => {'css' => "#comic img", 'attr' => "src"},
+          'title' => {'css' => "#comic img", 'attr' => "title"}
+        }
       }
-    }
-    @checker = Agents::WebsiteAgent.new(:name => "xkcd", :options => @site)
-    @checker.user = users(:bob)
-    @checker.save!
-  end
-
-  describe "#check" do
-    it "should check for changes" do
-      lambda { @checker.check }.should change { Event.count }.by(1)
-      lambda { @checker.check }.should_not change { Event.count }
+      @checker = Agents::WebsiteAgent.new(:name => "xkcd", :options => @site)
+      @checker.user = users(:bob)
+      @checker.save!
     end
 
-    it "should always save events when in :all mode" do
-      lambda {
-        @site['mode'] = 'all'
+    describe "#check" do
+      it "should check for changes" do
+        lambda { @checker.check }.should change { Event.count }.by(1)
+        lambda { @checker.check }.should_not change { Event.count }
+      end
+
+      it "should always save events when in :all mode" do
+        lambda {
+          @site['mode'] = 'all'
+          @checker.options = @site
+          @checker.check
+          @checker.check
+        }.should change { Event.count }.by(2)
+      end
+
+      it "should log an error if the number of results for a set of extraction patterns differs" do
+        @site['extract']['url']['css'] = "div"
         @checker.options = @site
         @checker.check
-        @checker.check
-      }.should change { Event.count }.by(2)
-    end
-
-    it "should log an error if the number of results for a set of extraction patterns differs" do
-      @site['extract']['url']['css'] = "div"
-      @checker.options = @site
-      @checker.check
-      @checker.logs.first.message.should =~ /Got an uneven number of matches/
+        @checker.logs.first.message.should =~ /Got an uneven number of matches/
+      end
     end
-  end
 
-  describe '#working?' do
-    it 'checks if events have been received within the expected receive period' do
-      stubbed_time = Time.now
-      stub(Time).now { stubbed_time }
+    describe '#working?' do
+      it 'checks if events have been received within the expected receive period' do
+        stubbed_time = Time.now
+        stub(Time).now { stubbed_time }
 
-      @checker.should_not be_working # No events created
-      @checker.check
-      @checker.reload.should be_working # Just created events
-
-      @checker.error "oh no!"
-      @checker.reload.should_not be_working # There is a recent error
+        @checker.should_not be_working # No events created
+        @checker.check
+        @checker.reload.should be_working # Just created events
 
-      stubbed_time = 20.minutes.from_now
-      @checker.events.delete_all
-      @checker.check
-      @checker.reload.should be_working # There is a newer event now
+        @checker.error "oh no!"
+        @checker.reload.should_not be_working # There is a recent error
 
-      stubbed_time = 2.days.from_now
-      @checker.reload.should_not be_working # Two days have passed without a new event having been created
-    end
-  end
+        stubbed_time = 20.minutes.from_now
+        @checker.events.delete_all
+        @checker.check
+        @checker.reload.should be_working # There is a newer event now
 
-  describe "parsing" do
-    it "parses CSS" do
-      @checker.check
-      event = Event.last
-      event.payload['url'].should == "http://imgs.xkcd.com/comics/evolving.png"
-      event.payload['title'].should =~ /^Biologists play reverse/
+        stubbed_time = 2.days.from_now
+        @checker.reload.should_not be_working # Two days have passed without a new event having been created
+      end
     end
 
-    it "should turn relative urls to absolute" do
-      rel_site = {
-        'name' => "XKCD",
-        'expected_update_period_in_days' => 2,
-        'type' => "html",
-        'url' => "http://xkcd.com",
-        'mode' => :on_change,
-        'extract' => {
-          'url' => {'css' => "#topLeft a", 'attr' => "href"},
-          'title' => {'css' => "#topLeft a", 'text' => "true"}
-        }
-      }
-      rel = Agents::WebsiteAgent.new(:name => "xkcd", :options => rel_site)
-      rel.user = users(:bob)
-      rel.save!
-      rel.check
-      event = Event.last
-      event.payload['url'].should == "http://xkcd.com/about"
-    end
+    describe "parsing" do
+      it "parses CSS" do
+        @checker.check
+        event = Event.last
+        event.payload['url'].should == "http://imgs.xkcd.com/comics/evolving.png"
+        event.payload['title'].should =~ /^Biologists play reverse/
+      end
 
-    describe "JSON" do
-      it "works with paths" do
-        json = {
-          'response' => {
-            'version' => 2,
-            'title' => "hello!"
-          }
-        }
-        stub_request(:any, /json-site/).to_return(:body => json.to_json, :status => 200)
-        site = {
-          'name' => "Some JSON Response",
+      it "should turn relative urls to absolute" do
+        rel_site = {
+          'name' => "XKCD",
           'expected_update_period_in_days' => 2,
-          'type' => "json",
-          'url' => "http://json-site.com",
-          'mode' => 'on_change',
+          'type' => "html",
+          'url' => "http://xkcd.com",
+          'mode' => :on_change,
           'extract' => {
-            'version' => {'path' => "response.version"},
-            'title' => {'path' => "response.title"}
+            'url' => {'css' => "#topLeft a", 'attr' => "href"},
+            'title' => {'css' => "#topLeft a", 'text' => "true"}
          }
        }
-        checker = Agents::WebsiteAgent.new(:name => "Weather Site", :options => site)
-        checker.user = users(:bob)
-        checker.save!
-
-        checker.check
+        rel = Agents::WebsiteAgent.new(:name => "xkcd", :options => rel_site)
+        rel.user = users(:bob)
+        rel.save!
+        rel.check
        event = Event.last
-        event.payload['version'].should == 2
-        event.payload['title'].should == "hello!"
+        event.payload['url'].should == "http://xkcd.com/about"
      end
 
-      it "can handle arrays" do
-        json = {
-          'response' => {
-            'data' => [
-              {'title' => "first", 'version' => 2},
-              {'title' => "second", 'version' => 2.5}
-            ]
+      describe "JSON" do
+        it "works with paths" do
+          json = {
+            'response' => {
+              'version' => 2,
+              'title' => "hello!"
+            }
          }
-        }
-        stub_request(:any, /json-site/).to_return(:body => json.to_json, :status => 200)
-        site = {
-          'name' => "Some JSON Response",
-          'expected_update_period_in_days' => 2,
-          'type' => "json",
-          'url' => "http://json-site.com",
-          'mode' => 'on_change',
-          'extract' => {
-            :title => {'path' => "response.data[*].title"},
-            :version => {'path' => "response.data[*].version"}
+          stub_request(:any, /json-site/).to_return(:body => json.to_json, :status => 200)
+          site = {
+            'name' => "Some JSON Response",
+            'expected_update_period_in_days' => 2,
+            'type' => "json",
+            'url' => "http://json-site.com",
+            'mode' => 'on_change',
+            'extract' => {
+              'version' => {'path' => "response.version"},
+              'title' => {'path' => "response.title"}
+            }
          }
-        }
-        checker = Agents::WebsiteAgent.new(:name => "Weather Site", :options => site)
-        checker.user = users(:bob)
-        checker.save!
+          checker = Agents::WebsiteAgent.new(:name => "Weather Site", :options => site)
+          checker.user = users(:bob)
+          checker.save!
 
-        lambda {
          checker.check
-        }.should change { Event.count }.by(2)
-
-        event = Event.all[-1]
-        event.payload['version'].should == 2.5
-        event.payload['title'].should == "second"
+          event = Event.last
+          event.payload['version'].should == 2
+          event.payload['title'].should == "hello!"
+        end
+
+        it "can handle arrays" do
+          json = {
+            'response' => {
+              'data' => [
+                {'title' => "first", 'version' => 2},
+                {'title' => "second", 'version' => 2.5}
+              ]
+            }
+          }
+          stub_request(:any, /json-site/).to_return(:body => json.to_json, :status => 200)
+          site = {
+            'name' => "Some JSON Response",
+            'expected_update_period_in_days' => 2,
+            'type' => "json",
+            'url' => "http://json-site.com",
+            'mode' => 'on_change',
+            'extract' => {
+              :title => {'path' => "response.data[*].title"},
+              :version => {'path' => "response.data[*].version"}
+            }
+          }
+          checker = Agents::WebsiteAgent.new(:name => "Weather Site", :options => site)
+          checker.user = users(:bob)
+          checker.save!
+
+          lambda {
+            checker.check
+          }.should change { Event.count }.by(2)
+
+          event = Event.all[-1]
+          event.payload['version'].should == 2.5
+          event.payload['title'].should == "second"
+
+          event = Event.all[-2]
+          event.payload['version'].should == 2
+          event.payload['title'].should == "first"
+        end
+
+        it "stores the whole object if :extract is not specified" do
+          json = {
+            'response' => {
+              'version' => 2,
+              'title' => "hello!"
+            }
+          }
+          stub_request(:any, /json-site/).to_return(:body => json.to_json, :status => 200)
+          site = {
+            'name' => "Some JSON Response",
+            'expected_update_period_in_days' => 2,
+            'type' => "json",
+            'url' => "http://json-site.com",
+            'mode' => 'on_change'
+          }
+          checker = Agents::WebsiteAgent.new(:name => "Weather Site", :options => site)
+          checker.user = users(:bob)
+          checker.save!
 
-        event = Event.all[-2]
-        event.payload['version'].should == 2
-        event.payload['title'].should == "first"
+          checker.check
+          event = Event.last
+          event.payload['response']['version'].should == 2
+          event.payload['response']['title'].should == "hello!"
+        end
      end
+    end
+  end
 
-      it "stores the whole object if :extract is not specified" do
-        json = {
-          'response' => {
-            'version' => 2,
-            'title' => "hello!"
-          }
-        }
-        stub_request(:any, /json-site/).to_return(:body => json.to_json, :status => 200)
-        site = {
-          'name' => "Some JSON Response",
-          'expected_update_period_in_days' => 2,
-          'type' => "json",
-          'url' => "http://json-site.com",
-          'mode' => 'on_change'
-        }
-        checker = Agents::WebsiteAgent.new(:name => "Weather Site", :options => site)
-        checker.user = users(:bob)
-        checker.save!
+  describe "checking with http basic auth" do
+    before do
+      stub_request(:any, /user:pass/).to_return(:body => File.read(Rails.root.join("spec/data_fixtures/xkcd.html")), :status => 200)
+      @site = {
+        'name' => "XKCD",
+        'expected_update_period_in_days' => 2,
+        'type' => "html",
+        'url' => "http://www.example.com",
+        'mode' => 'on_change',
+        'extract' => {
+          'url' => {'css' => "#comic img", 'attr' => "src"},
+          'title' => {'css' => "#comic img", 'attr' => "title"}
+        },
+        'basic_auth' => "user:pass"
+      }
+      @checker = Agents::WebsiteAgent.new(:name => "auth", :options => @site)
+      @checker.user = users(:bob)
+      @checker.save!
+    end
 
-        checker.check
-        event = Event.last
-        event.payload['response']['version'].should == 2
-        event.payload['response']['title'].should == "hello!"
+    describe "#check" do
+      it "should check for changes" do
+        lambda { @checker.check }.should change { Event.count }.by(1)
+        lambda { @checker.check }.should_not change { Event.count }
      end
    end
  end
+
 end