diff --git a/src/logic.js b/src/logic.js
index 4ff8282..2d4a5c7 100644
--- a/src/logic.js
+++ b/src/logic.js
@@ -221,7 +221,7 @@ const open_browser = async () => {
   return browser;
 };
 
-const register_upcoming_events_listener = (endpoint, page) => {
+const register_page_scraper = (endpoint, page) => {
   let responses = [];
   return new Promise((resolve, reject) => {
     page.on('response', async (response) => {
@@ -249,13 +249,10 @@ const register_upcoming_events_listener = (endpoint, page) => {
   });
 };
 
-const get_upcoming_events = async (browser, page_id) => {
+const get_page_events = async (browser, page_id, upcoming_only = true) => {
   let scraping = true;
   const facebook_page = await browser.newPage();
-  const upcoming_events = register_upcoming_events_listener(
-    graphql_endpoint,
-    facebook_page,
-  )
+  const upcoming_events = register_page_scraper(graphql_endpoint, facebook_page)
     .then((upcoming_events) => {
       scraping = false;
       return upcoming_events;
@@ -285,5 +282,5 @@ module.exports = {
   open_browser,
   parse_args,
   read_previous_events,
-  get_upcoming_events,
+  get_page_events,
 };
diff --git a/src/scrape.js b/src/scrape.js
index 1a65d33..b3c8770 100644
--- a/src/scrape.js
+++ b/src/scrape.js
@@ -2,7 +2,7 @@
 const { pathOr } = require('ramda');
 const {
   create_images_directory,
-  get_upcoming_events,
+  get_page_events,
   open_browser,
   parse_args,
   read_previous_events,
@@ -16,13 +16,14 @@ const { page_ids, output, events: event_file } = parse_args(
   create_images_directory('./img');
   const previous_events = await read_previous_events(event_file);
   const browser = await open_browser();
   let events = [];
   for (let page_id of page_ids) {
-    const new_events = get_upcoming_events(browser, page_id);
+    const upcoming_only = true;
+    const new_events = get_page_events(browser, page_id, upcoming_only);
     events = merge_edges(new_events, events);
     events = events.filter(