diff --git a/bin/hoopla.mjs b/bin/hoopla.mjs
index 0bead66..1fd3766 100644
--- a/bin/hoopla.mjs
+++ b/bin/hoopla.mjs
@@ -1,10 +1,11 @@
 import { get_upcoming_events } from '../src/hoopla/index.mjs';
 import send from '../src/signal/send.mjs';
+import { get_credentials, updated } from '../src/util.mjs';
 import fetch from 'node-fetch';

 const prod = true;

-let api, token;
+let { api, token } = get_credentials(prod);

 if (prod) {
   api = 'http://10.0.0.210:8484';
@@ -15,41 +16,7 @@ if (prod) {
 }

 const headers = { 'Content-Type': 'application/json' };
-
-const scrape = async (pageID) => {
-  try {
-    const res = await get_page_events({
-      pageID,
-      get_upcoming_events: true,
-      get_past_events: false
-    });
-    return res;
-  } catch (e) {
-    console.error(e);
-  }
-  return [];
-};
 const unix = (a) => parseInt(new Date(a).valueOf() / 1000, 10);
-const sleep = (ms) => new Promise((res) => setTimeout(res, ms));
-const updated = (oldEvent, scrapedEvent) => {
-  let keys = [
-    'canceled',
-    'end',
-    'start',
-    'draft',
-    'facebook_id',
-    'place_id',
-    'name',
-    'ticket_url'
-  ];
-  for (let key of keys) {
-    if (oldEvent[key] != scrapedEvent[key]) {
-      console.log(124, key, oldEvent[key], '!=', scrapedEvent[key]);
-      return true;
-    }
-  }
-  return false;
-};

 (async () => {
   let resp = await fetch(`${api}/places/?token=${token}`);
diff --git a/bin/scrape.mjs b/bin/scrape.mjs
index 931dc95..0caaaff 100644
--- a/bin/scrape.mjs
+++ b/bin/scrape.mjs
@@ -1,18 +1,11 @@
 import { get_page_events } from '../src/facebook/get-page-events.mjs';
+import { get_credentials, unix, updated } from '../src/util.mjs';
+import { update_last_scraped } from '../src/api/places.mjs';
 import send from '../src/signal/send.mjs';
 import fetch from 'node-fetch';

 const prod = true;
-
-let api, token;
-
-if (prod) {
-  api = 'http://10.0.0.210:8484';
-  token = '831411806230c7e950c4eeb226499ef92bb6bdc4157797929a0e16d133dc13a8';
-} else {
-  api = 'http://localhost:3333';
-  token = '1234567812345678123456781234567812345678123456781234567812345678';
-}
+const { api, token } = get_credentials(prod);

 const headers = { 'Content-Type': 'application/json' };

@@ -29,27 +22,6 @@ const scrape = async (pageID) => {
   }
   return [];
 };
-const unix = (a) => parseInt(new Date(a).valueOf() / 1000, 10);
-const sleep = (ms) => new Promise((res) => setTimeout(res, ms));
-const updated = (oldEvent, scrapedEvent) => {
-  let keys = [
-    'canceled',
-    'end',
-    'start',
-    'draft',
-    'facebook_id',
-    'place_id',
-    'name',
-    'ticket_url'
-  ];
-  for (let key of keys) {
-    if (oldEvent[key] != scrapedEvent[key]) {
-      console.log(124, oldEvent[key], '!=', scrapedEvent[key]);
-      return true;
-    }
-  }
-  return false;
-};

 (async () => {
   let resp = await fetch(`${api}/places/?token=${token}`);
@@ -77,8 +49,16 @@ const updated = (oldEvent, scrapedEvent) => {
       return true;
     });

+  // Scrape place #50 (skuret) last: pull it out and re-append it.
+  let skuret = places.find((place) => place.id == 50);
+  places = places.filter((place) => place.id != 50);
+  if (skuret) places = [...places, skuret];
   for (let place of places) {
-    console.log(177, `Scraping #${place.id} ${place.name}`);
+    console.log(
+      177,
+      `Scraping #${place.id} ${place.name}.\n` +
+        `{facebook_id: ${place.facebook_id}, facebook_name_id: "${place.facebook_name_id}"}`
+    );
     const events = await scrape(place.facebook_id);
     let payloads = [];
     for (let event of events) {
@@ -140,13 +120,8 @@ const updated = (oldEvent, scrapedEvent) => {
         console.log(201, 'Skip', place.name, payload.name);
       }
     }
-    let res = await fetch(`${api}/places/${place.id}/?token=${token}`, {
-      method: 'PATCH',
-      body: JSON.stringify({
-        last_scraped: unix(new Date())
-      }),
-      headers
-    });
-    console.log(res.status, `Last scrape at ${place.name} updated.`);
+    if (payloads.length > 0) {
+      await update_last_scraped(place, prod);
+    }
   }
 })();
diff --git a/src/api/places.mjs b/src/api/places.mjs
new file mode 100644
index 0000000..f9fc874
--- /dev/null
+++ b/src/api/places.mjs
@@ -0,0 +1,21 @@
+import fetch from 'node-fetch';
+import { unix, get_credentials } from '../util.mjs';
+
+export const update_last_scraped = async (place, prod = false) => {
+  const { api, token } = get_credentials(prod);
+  let res = await fetch(`${api}/places/${place.id}/?token=${token}`, {
+    method: 'PATCH',
+    body: JSON.stringify({
+      last_scraped: unix(new Date())
+    }),
+    headers: { 'Content-Type': 'application/json' }
+  });
+  if (res.ok) {
+    console.log(res.status, `Last scrape at ${place.name} updated.`);
+  } else {
+    console.log(
+      res.status,
+      `Failed to update last scrape at ${place.name}.`
+    );
+  }
+};
diff --git a/src/facebook/get-page-events.mjs b/src/facebook/get-page-events.mjs
index 138f75a..620a7dd 100644
--- a/src/facebook/get-page-events.mjs
+++ b/src/facebook/get-page-events.mjs
@@ -6,7 +6,7 @@ export const past_render_query = async ({ pageID }) => {
   const resp = await do_request(doc_id, { pageID });
   const page = resp?.data?.page?.past_events ?? null;
   if (page === null) {
-    console.error('13 Past events was null on ${pageID}');
+    console.error(13, `Past events was null on ${pageID}`);
   }
   return page;
 };
@@ -18,7 +18,7 @@ export const past_pagination_query = async ({ pageID, cursor }) => {
   const resp = await do_request(doc_id, { pageID, cursor, count });
   const page = resp?.data?.page?.past_events ?? null;
   if (page === null) {
-    console.error('12 Past events was null on ${pageID}');
+    console.error(12, `Past events was null on ${pageID}`);
   }
   return page;
 };
@@ -29,7 +29,7 @@ export const upcoming_render_query = async ({ pageID }) => {
   const resp = await do_request(doc_id, { pageID });
   const page = resp?.data?.page?.upcoming_events ?? null;
   if (page === null) {
-    console.error('15 Upcoming events was null on ${pageID}');
+    console.error(15, `Upcoming events was null on ${pageID}`);
   }
   return page;
 };
@@ -41,7 +41,7 @@ export const upcoming_pagination_query = async ({ pageID, cursor }) => {
   const resp = await do_request(doc_id, { pageID, cursor, count });
   const page = resp?.data?.page?.upcoming_events ?? null;
   if (page === null) {
-    console.error('15 Upcoming events was null on ${pageID}');
+    console.error(15, `Upcoming events was null on ${pageID}`);
   }
   return page;
 };
@@ -53,7 +53,7 @@ export const upcoming_reoccuring_render_query = async ({ pageID }) => {
   const page = resp?.data?.page?.upcomingRecurringEvents?.edges ??
     null;
   if (page === null) {
-    console.error('17 Recc events was null on ${pageID}');
+    console.error(17, `Recc events was null on ${pageID}`);
   }

   if (page === null || !Array.isArray(page)) {
@@ -99,7 +99,7 @@ export const get_page_events = async ({
     });
     if (paginationResult === null) {
       ++retries;
-      console.error(retries, 'retrying');
+      console.error(102, retries, 'retrying');
       continue;
     }
     if (retries > 10) {
@@ -126,7 +126,7 @@ export const get_page_events = async ({
     });
     if (paginationResult === null) {
       ++retries;
-      console.error(retries, 'retrying');
+      console.error(129, retries, 'retrying');
       continue;
     }
     if (retries > 10) {
diff --git a/src/util.mjs b/src/util.mjs
new file mode 100644
index 0000000..96ac0f0
--- /dev/null
+++ b/src/util.mjs
@@ -0,0 +1,36 @@
+export const get_credentials = (prod = false) => {
+  let api, token;
+  if (prod) {
+    api = 'http://10.0.0.210:8484';
+    token = '831411806230c7e950c4eeb226499ef92bb6bdc4157797929a0e16d133dc13a8';
+  } else {
+    api = 'http://localhost:3333';
+    token = '1234567812345678123456781234567812345678123456781234567812345678';
+  }
+  return { api, token };
+};
+export const sleep = (ms) => {
+  return new Promise((res) => setTimeout(res, ms));
+};
+export const unix = (a) => {
+  return parseInt(new Date(a).valueOf() / 1000, 10);
+};
+export const updated = (oldEvent, scrapedEvent) => {
+  let keys = [
+    'canceled',
+    'end',
+    'start',
+    'draft',
+    'facebook_id',
+    'place_id',
+    'name',
+    'ticket_url'
+  ];
+  for (let key of keys) {
+    if (oldEvent[key] != scrapedEvent[key]) {
+      console.log(124, key, oldEvent[key], '!=', scrapedEvent[key]);
+      return true;
+    }
+  }
+  return false;
+};
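
Usage note: a minimal sketch of how the helpers extracted into src/util.mjs fit together. This is illustrative only, not part of the patch: demo.mjs, the sample events, and the 500 ms delay are assumptions, while get_credentials, sleep, unix, and updated are the exports added above.

// demo.mjs - hypothetical usage sketch for the new src/util.mjs exports.
import { get_credentials, sleep, unix, updated } from './src/util.mjs';

// Resolve the API base URL and token for the dev environment.
const { api, token } = get_credentials(false);
console.log(`API at ${api}, token ${token.slice(0, 8)}...`);

// `updated` compares the eight tracked keys with `!=`, so a shifted
// start time alone is enough to flag an event as changed.
const stored = { name: 'Quiz Night', start: unix('2024-05-01T20:00:00Z') };
const scraped = { name: 'Quiz Night', start: unix('2024-05-01T21:00:00Z') };
console.log(updated(stored, scraped)); // true: 'start' differs

// `sleep` is a plain setTimeout wrapper, useful for throttling requests.
await sleep(500);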