
website_agent_spec.rb

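# Specs for Agents::WebsiteAgent: option validation, HTML/XML/JSON/text extraction,
# encoding and gzip handling, and event-driven scraping via #receive.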
require 'spec_helper'

describe Agents::WebsiteAgent do
  describe "checking without basic auth" do
    before do
      stub_request(:any, /xkcd/).to_return(body: File.read(Rails.root.join("spec/data_fixtures/xkcd.html")),
                                           status: 200,
                                           headers: {
                                             'X-Status-Message' => 'OK'
                                           })
      @valid_options = {
        'name' => "XKCD",
        'expected_update_period_in_days' => "2",
        'type' => "html",
        'url' => "http://xkcd.com",
        'mode' => 'on_change',
        'extract' => {
          'url' => { 'css' => "#comic img", 'value' => "@src" },
          'title' => { 'css' => "#comic img", 'value' => "@alt" },
          'hovertext' => { 'css' => "#comic img", 'value' => "@title" }
        }
      }
      @checker = Agents::WebsiteAgent.new(:name => "xkcd", :options => @valid_options, :keep_events_for => 2.days)
      @checker.user = users(:bob)
      @checker.save!
    end
    it_behaves_like WebRequestConcern

    describe "validations" do
      before do
        expect(@checker).to be_valid
      end

      it "should validate the integer fields" do
        @checker.options['expected_update_period_in_days'] = "2"
        expect(@checker).to be_valid
        @checker.options['expected_update_period_in_days'] = "nonsense"
        expect(@checker).not_to be_valid
      end

      it "should validate uniqueness_look_back" do
        @checker.options['uniqueness_look_back'] = "nonsense"
        expect(@checker).not_to be_valid
        @checker.options['uniqueness_look_back'] = "2"
        expect(@checker).to be_valid
      end

      it "should validate mode" do
        @checker.options['mode'] = "nonsense"
        expect(@checker).not_to be_valid
        @checker.options['mode'] = "on_change"
        expect(@checker).to be_valid
        @checker.options['mode'] = "all"
        expect(@checker).to be_valid
        @checker.options['mode'] = ""
        expect(@checker).to be_valid
      end

      it "should validate the force_encoding option" do
        @checker.options['force_encoding'] = ''
        expect(@checker).to be_valid
        @checker.options['force_encoding'] = 'UTF-8'
        expect(@checker).to be_valid
        @checker.options['force_encoding'] = ['UTF-8']
        expect(@checker).not_to be_valid
        @checker.options['force_encoding'] = 'UTF-42'
        expect(@checker).not_to be_valid
      end

      context "in 'json' type" do
        it "should ensure that all extractions have a 'path'" do
          @checker.options['type'] = 'json'
          @checker.options['extract'] = {
            'url' => { 'foo' => 'bar' },
          }
          expect(@checker).to_not be_valid
          expect(@checker.errors_on(:base)).to include(/When type is json, all extractions must have a path attribute/)
          @checker.options['type'] = 'json'
          @checker.options['extract'] = {
            'url' => { 'path' => 'bar' },
          }
          expect(@checker).to be_valid
        end
      end
    end
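    # #check fetches each URL and emits events, honoring `mode` (on_change/all)
    # and `uniqueness_look_back` for deduplication.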
    describe "#check" do
      it "should check for changes (and update Event.expires_at)" do
        expect { @checker.check }.to change { Event.count }.by(1)
        event = Event.last
        sleep 2
        expect { @checker.check }.not_to change { Event.count }
        update_event = Event.last
        expect(update_event.expires_at).not_to eq(event.expires_at)
      end

      it "should always save events when in :all mode" do
        expect {
          @valid_options['mode'] = 'all'
          @checker.options = @valid_options
          @checker.check
          @checker.check
        }.to change { Event.count }.by(2)
      end

      it "should take uniqueness_look_back into account during deduplication" do
        @valid_options['mode'] = 'all'
        @checker.options = @valid_options
        @checker.check
        @checker.check
        event = Event.last
        event.payload = "{}"
        event.save

        expect {
          @valid_options['mode'] = 'on_change'
          @valid_options['uniqueness_look_back'] = 2
          @checker.options = @valid_options
          @checker.check
        }.not_to change { Event.count }

        expect {
          @valid_options['mode'] = 'on_change'
          @valid_options['uniqueness_look_back'] = 1
          @checker.options = @valid_options
          @checker.check
        }.to change { Event.count }.by(1)
      end

      it "should log an error if the number of results for a set of extraction patterns differs" do
        @valid_options['extract']['url']['css'] = "div"
        @checker.options = @valid_options
        @checker.check
        expect(@checker.logs.first.message).to match(/Got an uneven number of matches/)
      end

      it "should accept an array for url" do
        @valid_options['url'] = ["http://xkcd.com/1/", "http://xkcd.com/2/"]
        @checker.options = @valid_options
        expect { @checker.save! }.not_to raise_error
        expect { @checker.check }.not_to raise_error
      end

      it "should parse events from all urls in array" do
        expect {
          @valid_options['url'] = ["http://xkcd.com/", "http://xkcd.com/"]
          @valid_options['mode'] = 'all'
          @checker.options = @valid_options
          @checker.check
        }.to change { Event.count }.by(2)
      end

      it "should follow unique rules when parsing array of urls" do
        expect {
          @valid_options['url'] = ["http://xkcd.com/", "http://xkcd.com/"]
          @checker.options = @valid_options
          @checker.check
        }.to change { Event.count }.by(1)
      end
    end
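    # Gzip-compressed responses are inflated when the `unzip` option is set to 'gzip'.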
    describe 'unzipping' do
      it 'should unzip with unzip option' do
        json = {
          'response' => {
            'version' => 2,
            'title' => "hello!"
          }
        }
        zipped = ActiveSupport::Gzip.compress(json.to_json)
        stub_request(:any, /gzip/).to_return(:body => zipped, :status => 200)
        site = {
          'name' => "Some JSON Response",
          'expected_update_period_in_days' => "2",
          'type' => "json",
          'url' => "http://gzip.com",
          'mode' => 'on_change',
          'extract' => {
            'version' => { 'path' => 'response.version' },
          },
          'unzip' => 'gzip',
        }
        checker = Agents::WebsiteAgent.new(:name => "Weather Site", :options => site)
        checker.user = users(:bob)
        checker.save!

        checker.check
        event = Event.last
        expect(event.payload['version']).to eq(2)
      end
    end
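    # `force_encoding` fixes the body encoding when the server declares none, or declares the wrong one.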
    describe 'encoding' do
      it 'should be forced with force_encoding option' do
        huginn = "\u{601d}\u{8003}"
        stub_request(:any, /no-encoding/).to_return(:body => {
          :value => huginn,
        }.to_json.encode(Encoding::EUC_JP), :headers => {
          'Content-Type' => 'application/json',
        }, :status => 200)
        site = {
          'name' => "Some JSON Response",
          'expected_update_period_in_days' => "2",
          'type' => "json",
          'url' => "http://no-encoding.example.com",
          'mode' => 'on_change',
          'extract' => {
            'value' => { 'path' => 'value' },
          },
          'force_encoding' => 'EUC-JP',
        }
        checker = Agents::WebsiteAgent.new(:name => "No Encoding Site", :options => site)
        checker.user = users(:bob)
        checker.save!

        checker.check
        event = Event.last
        expect(event.payload['value']).to eq(huginn)
      end

      it 'should be overridden with force_encoding option' do
        huginn = "\u{601d}\u{8003}"
        stub_request(:any, /wrong-encoding/).to_return(:body => {
          :value => huginn,
        }.to_json.encode(Encoding::EUC_JP), :headers => {
          'Content-Type' => 'application/json; UTF-8',
        }, :status => 200)
        site = {
          'name' => "Some JSON Response",
          'expected_update_period_in_days' => "2",
          'type' => "json",
          'url' => "http://wrong-encoding.example.com",
          'mode' => 'on_change',
          'extract' => {
            'value' => { 'path' => 'value' },
          },
          'force_encoding' => 'EUC-JP',
        }
        checker = Agents::WebsiteAgent.new(:name => "Wrong Encoding Site", :options => site)
        checker.user = users(:bob)
        checker.save!

        checker.check
        event = Event.last
        expect(event.payload['value']).to eq(huginn)
      end
    end
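    # #working? is true when an event was created recently enough and no error
    # has been logged since the last event.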
    describe '#working?' do
      it 'checks if events have been received within the expected receive period' do
        stubbed_time = Time.now
        stub(Time).now { stubbed_time }

        expect(@checker).not_to be_working # No events created
        @checker.check
        expect(@checker.reload).to be_working # Just created events

        @checker.error "oh no!"
        expect(@checker.reload).not_to be_working # There is a recent error

        stubbed_time = 20.minutes.from_now
        @checker.events.delete_all
        @checker.check
        expect(@checker.reload).to be_working # There is a newer event now

        stubbed_time = 2.days.from_now
        expect(@checker.reload).not_to be_working # Two days have passed without a new event having been created
      end
    end
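    # Extraction of values via CSS selectors, XPath expressions, JSON paths, and
    # regular expressions; relative URLs are turned into absolute ones.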
    describe "parsing" do
      it "parses CSS" do
        @checker.check
        event = Event.last
        expect(event.payload['url']).to eq("http://imgs.xkcd.com/comics/evolving.png")
        expect(event.payload['title']).to eq("Evolving")
        expect(event.payload['hovertext']).to match(/^Biologists play reverse/)
      end

      it "parses XPath" do
        @valid_options['extract'].each { |key, value|
          value.delete('css')
          value['xpath'] = "//*[@id='comic']//img"
        }
        @checker.options = @valid_options
        @checker.check
        event = Event.last
        expect(event.payload['url']).to eq("http://imgs.xkcd.com/comics/evolving.png")
        expect(event.payload['title']).to eq("Evolving")
        expect(event.payload['hovertext']).to match(/^Biologists play reverse/)
      end

      it "should turn relative urls to absolute" do
        rel_site = {
          'name' => "XKCD",
          'expected_update_period_in_days' => "2",
          'type' => "html",
          'url' => "http://xkcd.com",
          'mode' => "on_change",
          'extract' => {
            'url' => { 'css' => "#topLeft a", 'value' => "@href" },
          }
        }
        rel = Agents::WebsiteAgent.new(:name => "xkcd", :options => rel_site)
        rel.user = users(:bob)
        rel.save!
        rel.check
        event = Event.last
        expect(event.payload['url']).to eq("http://xkcd.com/about")
      end

      it "should return an integer value if XPath evaluates to one" do
        rel_site = {
          'name' => "XKCD",
          'expected_update_period_in_days' => 2,
          'type' => "html",
          'url' => "http://xkcd.com",
          'mode' => "on_change",
          'extract' => {
            'num_links' => { 'css' => "#comicLinks", 'value' => "count(./a)" }
          }
        }
        rel = Agents::WebsiteAgent.new(:name => "xkcd", :options => rel_site)
        rel.user = users(:bob)
        rel.save!
        rel.check
        event = Event.last
        expect(event.payload['num_links']).to eq("9")
      end

      it "should return all texts concatenated if XPath returns many text nodes" do
        rel_site = {
          'name' => "XKCD",
          'expected_update_period_in_days' => 2,
          'type' => "html",
          'url' => "http://xkcd.com",
          'mode' => "on_change",
          'extract' => {
            'slogan' => { 'css' => "#slogan", 'value' => ".//text()" }
          }
        }
        rel = Agents::WebsiteAgent.new(:name => "xkcd", :options => rel_site)
        rel.user = users(:bob)
        rel.save!
        rel.check
        event = Event.last
        expect(event.payload['slogan']).to eq("A webcomic of romance, sarcasm, math, and language.")
      end

      it "should interpolate _response_" do
        @valid_options['extract']['response_info'] =
          @valid_options['extract']['url'].merge(
            'value' => '"{{ "The response was " | append:_response_.status | append:" " | append:_response_.headers.X-Status-Message | append:"." }}"'
          )
        @checker.options = @valid_options
        @checker.check
        event = Event.last
        expect(event.payload['response_info']).to eq('The response was 200 OK.')
      end
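      # XML/Atom documents; namespaces can be kept or stripped via `use_namespaces`.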
      describe "XML" do
        before do
          stub_request(:any, /github_rss/).to_return(
            body: File.read(Rails.root.join("spec/data_fixtures/github_rss.atom")),
            status: 200
          )

          @checker = Agents::WebsiteAgent.new(name: 'github', options: {
            'name' => 'GitHub',
            'expected_update_period_in_days' => '2',
            'type' => 'xml',
            'url' => 'http://example.com/github_rss.atom',
            'mode' => 'on_change',
            'extract' => {
              'title' => { 'xpath' => '/feed/entry', 'value' => 'normalize-space(./title)' },
              'url' => { 'xpath' => '/feed/entry', 'value' => './link[1]/@href' },
              'thumbnail' => { 'xpath' => '/feed/entry', 'value' => './thumbnail/@url' },
            }
          }, keep_events_for: 2.days)
          @checker.user = users(:bob)
          @checker.save!
        end

        it "works with XPath" do
          expect {
            @checker.check
          }.to change { Event.count }.by(20)
          event = Event.last
          expect(event.payload['title']).to eq('Shift to dev group')
          expect(event.payload['url']).to eq('https://github.com/cantino/huginn/commit/d465158f77dcd9078697e6167b50abbfdfa8b1af')
          expect(event.payload['thumbnail']).to eq('https://avatars3.githubusercontent.com/u/365751?s=30')
        end

        it "works with XPath with namespaces unstripped" do
          @checker.options['use_namespaces'] = 'true'
          @checker.save!
          expect {
            @checker.check
          }.to change { Event.count }.by(0)

          @checker.options['extract'] = {
            'title' => { 'xpath' => '/xmlns:feed/xmlns:entry', 'value' => 'normalize-space(./xmlns:title)' },
            'url' => { 'xpath' => '/xmlns:feed/xmlns:entry', 'value' => './xmlns:link[1]/@href' },
            'thumbnail' => { 'xpath' => '/xmlns:feed/xmlns:entry', 'value' => './media:thumbnail/@url' },
          }
          @checker.save!
          expect {
            @checker.check
          }.to change { Event.count }.by(20)
          event = Event.last
          expect(event.payload['title']).to eq('Shift to dev group')
          expect(event.payload['url']).to eq('https://github.com/cantino/huginn/commit/d465158f77dcd9078697e6167b50abbfdfa8b1af')
          expect(event.payload['thumbnail']).to eq('https://avatars3.githubusercontent.com/u/365751?s=30')
        end

        it "works with CSS selectors" do
          @checker.options['extract'] = {
            'title' => { 'css' => 'feed > entry', 'value' => 'normalize-space(./title)' },
            'url' => { 'css' => 'feed > entry', 'value' => './link[1]/@href' },
            'thumbnail' => { 'css' => 'feed > entry', 'value' => './thumbnail/@url' },
          }
          @checker.save!
          expect {
            @checker.check
          }.to change { Event.count }.by(20)
          event = Event.last
          expect(event.payload['title']).to be_empty
          expect(event.payload['thumbnail']).to be_empty

          @checker.options['extract'] = {
            'title' => { 'css' => 'feed > entry', 'value' => 'normalize-space(./xmlns:title)' },
            'url' => { 'css' => 'feed > entry', 'value' => './xmlns:link[1]/@href' },
            'thumbnail' => { 'css' => 'feed > entry', 'value' => './media:thumbnail/@url' },
          }
          @checker.save!
          expect {
            @checker.check
          }.to change { Event.count }.by(20)
          event = Event.last
          expect(event.payload['title']).to eq('Shift to dev group')
          expect(event.payload['url']).to eq('https://github.com/cantino/huginn/commit/d465158f77dcd9078697e6167b50abbfdfa8b1af')
          expect(event.payload['thumbnail']).to eq('https://avatars3.githubusercontent.com/u/365751?s=30')
        end

        it "works with CSS selectors with namespaces stripped" do
          @checker.options['extract'] = {
            'title' => { 'css' => 'feed > entry', 'value' => 'normalize-space(./title)' },
            'url' => { 'css' => 'feed > entry', 'value' => './link[1]/@href' },
            'thumbnail' => { 'css' => 'feed > entry', 'value' => './thumbnail/@url' },
          }
          @checker.options['use_namespaces'] = 'false'
          @checker.save!
          expect {
            @checker.check
          }.to change { Event.count }.by(20)
          event = Event.last
          expect(event.payload['title']).to eq('Shift to dev group')
          expect(event.payload['url']).to eq('https://github.com/cantino/huginn/commit/d465158f77dcd9078697e6167b50abbfdfa8b1af')
          expect(event.payload['thumbnail']).to eq('https://avatars3.githubusercontent.com/u/365751?s=30')
        end
      end
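      # JSON documents are queried with `path` expressions; without `extract`,
      # the whole parsed object becomes the event payload.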
      describe "JSON" do
        it "works with paths" do
          json = {
            'response' => {
              'version' => 2,
              'title' => "hello!"
            }
          }
          stub_request(:any, /json-site/).to_return(:body => json.to_json, :status => 200)
          site = {
            'name' => "Some JSON Response",
            'expected_update_period_in_days' => "2",
            'type' => "json",
            'url' => "http://json-site.com",
            'mode' => 'on_change',
            'extract' => {
              'version' => { 'path' => "response.version" },
              'title' => { 'path' => "response.title" }
            }
          }
          checker = Agents::WebsiteAgent.new(:name => "Weather Site", :options => site)
          checker.user = users(:bob)
          checker.save!

          checker.check
          event = Event.last
          expect(event.payload['version']).to eq(2)
          expect(event.payload['title']).to eq("hello!")
        end

        it "can handle arrays" do
          json = {
            'response' => {
              'data' => [
                { 'title' => "first", 'version' => 2 },
                { 'title' => "second", 'version' => 2.5 }
              ]
            }
          }
          stub_request(:any, /json-site/).to_return(:body => json.to_json, :status => 200)
          site = {
            'name' => "Some JSON Response",
            'expected_update_period_in_days' => "2",
            'type' => "json",
            'url' => "http://json-site.com",
            'mode' => 'on_change',
            'extract' => {
              :title => { 'path' => "response.data[*].title" },
              :version => { 'path' => "response.data[*].version" }
            }
          }
          checker = Agents::WebsiteAgent.new(:name => "Weather Site", :options => site)
          checker.user = users(:bob)
          checker.save!

          expect {
            checker.check
          }.to change { Event.count }.by(2)

          (event2, event1) = Event.last(2)
          expect(event1.payload['version']).to eq(2.5)
          expect(event1.payload['title']).to eq("second")
          expect(event2.payload['version']).to eq(2)
          expect(event2.payload['title']).to eq("first")
        end

        it "stores the whole object if :extract is not specified" do
          json = {
            'response' => {
              'version' => 2,
              'title' => "hello!"
            }
          }
          stub_request(:any, /json-site/).to_return(:body => json.to_json, :status => 200)
          site = {
            'name' => "Some JSON Response",
            'expected_update_period_in_days' => "2",
            'type' => "json",
            'url' => "http://json-site.com",
            'mode' => 'on_change'
          }
          checker = Agents::WebsiteAgent.new(:name => "Weather Site", :options => site)
          checker.user = users(:bob)
          checker.save!

          checker.check
          event = Event.last
          expect(event.payload['response']['version']).to eq(2)
          expect(event.payload['response']['title']).to eq("hello!")
        end
      end
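      # Plain-text bodies are scanned with `regexp`; captures are selected by a
      # numbered or named `index`.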
      describe "text parsing" do
        before do
          stub_request(:any, /text-site/).to_return(body: <<-EOF, status: 200)
water: wet
fire: hot
          EOF
          site = {
            'name' => 'Some Text Response',
            'expected_update_period_in_days' => '2',
            'type' => 'text',
            'url' => 'http://text-site.com',
            'mode' => 'on_change',
            'extract' => {
              'word' => { 'regexp' => '^(.+?): (.+)$', index: 1 },
              'property' => { 'regexp' => '^(.+?): (.+)$', index: '2' },
            }
          }
          @checker = Agents::WebsiteAgent.new(name: 'Text Site', options: site)
          @checker.user = users(:bob)
          @checker.save!
        end

        it "works with regexp with named capture" do
          @checker.options = @checker.options.merge('extract' => {
            'word' => { 'regexp' => '^(?<word>.+?): (?<property>.+)$', index: 'word' },
            'property' => { 'regexp' => '^(?<word>.+?): (?<property>.+)$', index: 'property' },
          })

          expect {
            @checker.check
          }.to change { Event.count }.by(2)

          event1, event2 = Event.last(2)
          expect(event1.payload['word']).to eq('water')
          expect(event1.payload['property']).to eq('wet')
          expect(event2.payload['word']).to eq('fire')
          expect(event2.payload['property']).to eq('hot')
        end

        it "works with regexp" do
          expect {
            @checker.check
          }.to change { Event.count }.by(2)

          event1, event2 = Event.last(2)
          expect(event1.payload['word']).to eq('water')
          expect(event1.payload['property']).to eq('wet')
          expect(event2.payload['word']).to eq('fire')
          expect(event2.payload['property']).to eq('hot')
        end
      end
    end
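    # Incoming events supply the URL to scrape (payload `url` or `url_from_event`)
    # and values for Liquid interpolation.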
    describe "#receive" do
      before do
        @event = Event.new
        @event.agent = agents(:bob_rain_notifier_agent)
        @event.payload = {
          'url' => 'http://xkcd.com',
          'link' => 'Random',
        }
      end

      it "should scrape from the url element in incoming event payload" do
        expect {
          @checker.options = @valid_options
          @checker.receive([@event])
        }.to change { Event.count }.by(1)
      end

      it "should use url_from_event as url to scrape if it exists when receiving an event" do
        stub = stub_request(:any, 'http://example.org/?url=http%3A%2F%2Fxkcd.com')

        @checker.options = @valid_options.merge(
          'url_from_event' => 'http://example.org/?url={{url | uri_escape}}'
        )
        @checker.receive([@event])

        expect(stub).to have_been_requested
      end

      it "should interpolate values from incoming event payload" do
        expect {
          @valid_options['extract'] = {
            'from' => {
              'xpath' => '*[1]',
              'value' => '{{url | to_xpath}}'
            },
            'to' => {
              'xpath' => '(//a[@href and text()={{link | to_xpath}}])[1]',
              'value' => '@href'
            },
          }
          @checker.options = @valid_options
          @checker.receive([@event])
        }.to change { Event.count }.by(1)

        expect(Event.last.payload).to eq({
          'from' => 'http://xkcd.com',
          'to' => 'http://dynamic.xkcd.com/random/comic/',
        })
      end

      it "should interpolate values from incoming event payload and _response_" do
        @event.payload['title'] = 'XKCD'

        expect {
          @valid_options['extract'] = {
            'response_info' => @valid_options['extract']['url'].merge(
              'value' => '{% capture sentence %}The response from {{title}} was {{_response_.status}} {{_response_.headers.X-Status-Message}}.{% endcapture %}{{sentence | to_xpath}}'
            )
          }
          @checker.options = @valid_options
          @checker.receive([@event])
        }.to change { Event.count }.by(1)

        expect(Event.last.payload['response_info']).to eq('The response from XKCD was 200 OK.')
      end

      it "should support merging of events" do
        expect {
          @checker.options = @valid_options
          @checker.options[:mode] = "merge"
          @checker.receive([@event])
        }.to change { Event.count }.by(1)
        last_payload = Event.last.payload
        expect(last_payload['link']).to eq('Random')
      end
    end
  end
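  # The `basic_auth` option sends an Authorization: Basic header with each request.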
  describe "checking with http basic auth" do
    before do
      stub_request(:any, /example/).
        with(headers: { 'Authorization' => "Basic #{['user:pass'].pack('m').chomp}" }).
        to_return(:body => File.read(Rails.root.join("spec/data_fixtures/xkcd.html")), :status => 200)
      @valid_options = {
        'name' => "XKCD",
        'expected_update_period_in_days' => "2",
        'type' => "html",
        'url' => "http://www.example.com",
        'mode' => 'on_change',
        'extract' => {
          'url' => { 'css' => "#comic img", 'value' => "@src" },
          'title' => { 'css' => "#comic img", 'value' => "@alt" },
          'hovertext' => { 'css' => "#comic img", 'value' => "@title" }
        },
        'basic_auth' => "user:pass"
      }
      @checker = Agents::WebsiteAgent.new(:name => "auth", :options => @valid_options)
      @checker.user = users(:bob)
      @checker.save!
    end

    describe "#check" do
      it "should check for changes" do
        expect { @checker.check }.to change { Event.count }.by(1)
        expect { @checker.check }.not_to change { Event.count }
      end
    end
  end
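  # Custom request headers given in the `headers` option are sent with each request.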
  describe "checking with headers" do
    before do
      stub_request(:any, /example/).
        with(headers: { 'foo' => 'bar' }).
        to_return(:body => File.read(Rails.root.join("spec/data_fixtures/xkcd.html")), :status => 200)
      @valid_options = {
        'name' => "XKCD",
        'expected_update_period_in_days' => "2",
        'type' => "html",
        'url' => "http://www.example.com",
        'mode' => 'on_change',
        'headers' => { 'foo' => 'bar' },
        'extract' => {
          'url' => { 'css' => "#comic img", 'value' => "@src" },
        }
      }
      @checker = Agents::WebsiteAgent.new(:name => "ua", :options => @valid_options)
      @checker.user = users(:bob)
      @checker.save!
    end

    describe "#check" do
      it "should check for changes" do
        expect { @checker.check }.to change { Event.count }.by(1)
      end
    end
  end
end