rss_agent_spec.rb

require 'spec_helper'

describe Agents::RssAgent do
  before do
    @valid_options = {
      'expected_update_period_in_days' => "2",
      'url' => "https://github.com/cantino/huginn/commits/master.atom",
    }

    # Serve both feeds from local Atom fixtures so no live HTTP requests are made.
    stub_request(:any, /github.com/).to_return(:body => File.read(Rails.root.join("spec/data_fixtures/github_rss.atom")), :status => 200)
    stub_request(:any, /SlickdealsnetFP/).to_return(:body => File.read(Rails.root.join("spec/data_fixtures/slickdeals.atom")), :status => 200)
  end
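
  # An RssAgent owned by the bob fixture user, created from @valid_options above.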
  let(:agent) do
    _agent = Agents::RssAgent.new(:name => "rss feed", :options => @valid_options)
    _agent.user = users(:bob)
    _agent.save!
    _agent
  end
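
  # Shared examples (defined elsewhere in the suite) for agents that make web requests.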
  it_behaves_like WebRequestConcern
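
  # Option validation: `url` may be a single string or an array of strings but must be
  # present, and `expected_update_period_in_days` must be a positive number.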
  describe "validations" do
    it "should validate the presence of url" do
      agent.options['url'] = "http://google.com"
      expect(agent).to be_valid
      agent.options['url'] = ["http://google.com", "http://yahoo.com"]
      expect(agent).to be_valid
      agent.options['url'] = ""
      expect(agent).not_to be_valid
      agent.options['url'] = nil
      expect(agent).not_to be_valid
    end

    it "should validate the presence and numericality of expected_update_period_in_days" do
      agent.options['expected_update_period_in_days'] = "5"
      expect(agent).to be_valid
      agent.options['expected_update_period_in_days'] = "wut?"
      expect(agent).not_to be_valid
      agent.options['expected_update_period_in_days'] = 0
      expect(agent).not_to be_valid
      agent.options['expected_update_period_in_days'] = nil
      expect(agent).not_to be_valid
      agent.options['expected_update_period_in_days'] = ""
      expect(agent).not_to be_valid
    end
  end
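
  # The github_rss.atom fixture contains 20 entries, so one check of the
  # default feed is expected to emit 20 events.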
  describe "emitting RSS events" do
    it "should emit items as events" do
      expect {
        agent.check
      }.to change { agent.events.count }.by(20)

      first, *, last = agent.events.last(20)
      expect(first.payload['url']).to eq("https://github.com/cantino/huginn/commit/d0a844662846cf3c83b94c637c1803f03db5a5b0")
      expect(first.payload['urls']).to eq(["https://github.com/cantino/huginn/commit/d0a844662846cf3c83b94c637c1803f03db5a5b0"])
      expect(last.payload['url']).to eq("https://github.com/cantino/huginn/commit/d465158f77dcd9078697e6167b50abbfdfa8b1af")
      expect(last.payload['urls']).to eq(["https://github.com/cantino/huginn/commit/d465158f77dcd9078697e6167b50abbfdfa8b1af"])
    end
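
    # events_order re-orders the emitted events by the given Liquid expression,
    # here the entry title with leading whitespace stripped.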
    it "should emit items as events in the order specified in the events_order option" do
      expect {
        agent.options['events_order'] = ['{{title | replace_regex: "^[[:space:]]+", "" }}']
        agent.check
      }.to change { agent.events.count }.by(20)

      first, *, last = agent.events.last(20)
      expect(first.payload['title'].strip).to eq('upgrade rails and gems')
      expect(first.payload['url']).to eq("https://github.com/cantino/huginn/commit/87a7abda23a82305d7050ac0bb400ce36c863d01")
      expect(first.payload['urls']).to eq(["https://github.com/cantino/huginn/commit/87a7abda23a82305d7050ac0bb400ce36c863d01"])
      expect(last.payload['title'].strip).to eq('Dashed line in a diagram indicates propagate_immediately being false.')
      expect(last.payload['url']).to eq("https://github.com/cantino/huginn/commit/0e80f5341587aace2c023b06eb9265b776ac4535")
      expect(last.payload['urls']).to eq(["https://github.com/cantino/huginn/commit/0e80f5341587aace2c023b06eb9265b776ac4535"])
    end
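
    # Ids already present in memory['seen_ids'] are skipped on later checks;
    # removing an id from memory lets that item come through again.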
    it "should track ids and not re-emit the same item when seen again" do
      agent.check
      expect(agent.memory['seen_ids']).to eq(agent.events.map { |e| e.payload['id'] })

      newest_id = agent.memory['seen_ids'][0]
      expect(agent.events.first.payload['id']).to eq(newest_id)
      agent.memory['seen_ids'] = agent.memory['seen_ids'][1..-1] # forget the newest id

      expect {
        agent.check
      }.to change { agent.events.count }.by(1)

      expect(agent.events.first.payload['id']).to eq(newest_id)
      expect(agent.memory['seen_ids'][0]).to eq(newest_id)
    end
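
    # 490 pre-seeded ids plus 20 new entries would grow the list to 510;
    # the agent keeps seen_ids capped at 500.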
    it "should truncate the seen_ids in memory at 500 items" do
      agent.memory['seen_ids'] = ['x'] * 490
      agent.check
      expect(agent.memory['seen_ids'].length).to eq(500)
    end
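
    # When url is an array, every feed is fetched on each check:
    # 20 entries from the GitHub fixture plus 79 from the Slickdeals fixture.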
    it "should support an array of URLs" do
      agent.options['url'] = ["https://github.com/cantino/huginn/commits/master.atom", "http://feeds.feedburner.com/SlickdealsnetFP?format=atom"]
      agent.save!

      expect {
        agent.check
      }.to change { agent.events.count }.by(20 + 79)
    end
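
    # max_events_per_run caps how many events a single check may emit;
    # a value of zero or below disables the cap.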
    it "should fetch one event per run" do
      agent.options['url'] = ["https://github.com/cantino/huginn/commits/master.atom"]
      agent.options['max_events_per_run'] = 1
      agent.check
      expect(agent.events.count).to eq(1)
    end

    it "should fetch all events per run" do
      agent.options['url'] = ["https://github.com/cantino/huginn/commits/master.atom"]

      # <= 0 should ignore the option and fetch all items
      agent.options['max_events_per_run'] = 0
      agent.check
      expect(agent.events.count).to eq(20)

      # every item has already been seen, so a second check emits nothing new
      agent.options['max_events_per_run'] = -1
      expect {
        agent.check
      }.to_not change { agent.events.count }
    end
  end
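
  # Entries in the Slickdeals fixture carry no ids, so the agent falls back to
  # an MD5 digest of each entry's content for deduplication.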
  context "when no ids are available" do
    before do
      @valid_options['url'] = 'http://feeds.feedburner.com/SlickdealsnetFP?format=atom'
    end

    it "calculates content MD5 sums" do
      expect {
        agent.check
      }.to change { agent.events.count }.by(79)
      expect(agent.memory['seen_ids']).to eq(agent.events.map { |e| Digest::MD5.hexdigest(e.payload['content']) })
    end
  end
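
  # A parser failure should not raise out of check; instead the error is
  # logged with the offending feed URL included in the message.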
  describe 'logging errors with the feed url' do
    it 'includes the feed URL when an exception is raised' do
      # Stub the feed parser to raise so the rescue/logging path is exercised.
      mock(FeedNormalizer::FeedNormalizer).parse(anything, :loose => true) { raise StandardError.new("Some error!") }

      expect(lambda {
        agent.check
      }).not_to raise_error

      expect(agent.logs.last.message).to match(%r[Failed to fetch https://github.com])
    end
  end
end