diff --git a/index.ts b/index.ts
index 502cc73..5baa4d1 100644
--- a/index.ts
+++ b/index.ts
@@ -101,7 +101,7 @@ async function main(): Promise<void> {
         statusMessage(MessageType.Critical, 'Forums already scraped, skipping forum scraping...');
     } else {
         statusMessage(MessageType.Info, 'Scraping forums...');
-        await getForums(database, config.domain, sessionID, forumModuleIDs);
+        await getForums(database, config.domain, sessionID, [...new Set(forumModuleIDs)]);
         await insertRow(database, 'scrapers', 'forums', true);
         deleteFiles(['./target/recovery/forum_progress.json']);
         statusMessage(MessageType.Completion, 'Finished forum scraping');
@@ -131,7 +131,7 @@ async function main(): Promise<void> {
         statusMessage(MessageType.Critical, 'Wikis already scraped, skipping wiki scraping...');
     } else {
         statusMessage(MessageType.Info, 'Scraping wikis...');
-        await getWikis(config.domain, database, wikiModuleIDs);
+        await getWikis(config.domain, database, [...new Set(wikiModuleIDs)]);
         await insertRow(database, 'scrapers', 'wikis', true);
         deleteFiles(['./target/recovery/wiki_progress.json']);
         statusMessage(MessageType.Completion, 'Finished wiki scraping');
@@ -149,7 +149,7 @@ async function main(): Promise<void> {
         statusMessage(MessageType.Critical, 'News already scraped, skipping news scraping...');
     } else {
         statusMessage(MessageType.Info, 'Scraping news...');
-        await getNews(database, config.domain, sessionID, siteAuth, newsModuleIDs);
+        await getNews(database, config.domain, sessionID, siteAuth, [...new Set(newsModuleIDs)]);
         await insertRow(database, 'scrapers', 'news', true);
         statusMessage(MessageType.Completion, 'Finished news scraping');
     }
@@ -183,13 +183,15 @@ async function main(): Promise<void> {
     }
 
     // Get tickets
+    let ticketModuleIDs = await queryModuleIDs(database, 'tickets');
+    config.manualTicketModuleIDs && config.manualTicketModuleIDs.length > 0 ? ticketModuleIDs.push(...config.manualTicketModuleIDs) : {};
     if (config.disabledModules?.tickets) {
         statusMessage(MessageType.Critical, 'Tickets module disabled, skipping ticket scraping...');
     } else if (await isModuleScraped(database, 'tickets')) {
         statusMessage(MessageType.Critical, 'Tickets already scraped, skipping ticket scraping...');
     } else {
         statusMessage(MessageType.Info, 'Scraping tickets...');
-        await getAllTickets(database, config.domain, config.apiKey, sessionID, siteAuth, adminMode, config.excludeTicketModuleIDs ?? null, config.manualTicketModuleIDs ?? null);
+        await getAllTickets(database, config.domain, config.apiKey, sessionID, siteAuth, adminMode, config.excludeTicketModuleIDs ?? null, ticketModuleIDs ?? null);
         await insertRow(database, 'scrapers', 'tickets', true);
         deleteFiles(['./target/recovery/module_tickets.json']);
         statusMessage(MessageType.Completion, 'Finished ticket scraping');
diff --git a/package.json b/package.json
index 26659dd..0751012 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "enjinscraper",
-  "version": "1.6.1",
+  "version": "1.6.2",
   "description": "Scrapes an Enjin site via the Enjin API",
   "repository": "https://github.com/Kas-tle/EnjinScraper.git",
   "author": "Joshua Castle 
!excludedModules.includes(module)) : {};
     statusMessage(MessageType.Info, `Found ${modules.length} ticket modules: ${modules.join(', ')}`);
-    await getTicketsByModule(database, domain, sessionID, siteAuth, modules, adminMode);
+    await getTicketsByModule(database, domain, sessionID, siteAuth, [...new Set(modules)], adminMode);
 }
\ No newline at end of file
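
// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the diff above): the recurring change wraps
// each module ID list in [...new Set(...)] before it is handed to a scraper,
// so an ID that appears more than once (e.g. returned by the site query and
// also listed manually in the config) is only scraped once. The names below
// are hypothetical stand-ins for the queryModuleIDs/config wiring used in the
// diff, not the project's actual signatures.

type ScraperConfig = {
    manualTicketModuleIDs?: string[];
};

// Hypothetical stand-in for `await queryModuleIDs(database, 'tickets')`.
async function queryTicketModuleIDs(): Promise<string[]> {
    return ['1000001', '1000002', '1000002']; // may contain duplicates
}

async function collectTicketModuleIDs(config: ScraperConfig): Promise<string[]> {
    const ticketModuleIDs = await queryTicketModuleIDs();

    // Append any manually configured ticket module IDs, as the new index.ts code does.
    if (config.manualTicketModuleIDs && config.manualTicketModuleIDs.length > 0) {
        ticketModuleIDs.push(...config.manualTicketModuleIDs);
    }

    // [...new Set(ids)] removes duplicates while preserving first-seen order.
    return [...new Set(ticketModuleIDs)];
}

// Usage: logs ['1000001', '1000002', '1000003']
collectTicketModuleIDs({ manualTicketModuleIDs: ['1000002', '1000003'] })
    .then((ids) => console.log(ids));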