use crawling logic as a service for ICS retrieval

Ondřej Synáček 2020-07-17 20:54:11 +02:00
parent 0eda018113
commit b3321b2f90
2 changed files with 3 additions and 2 deletions


@@ -5,6 +5,7 @@ const favicon = require('serve-favicon')
 const rateLimit = require('express-rate-limit')
 const retrieveICS = require('./services/ics-retriever')
+const crawl = require('./services/crawler')
 const {
   genericErrorHandler,
   checkURLParameter,
@@ -83,6 +84,7 @@ app.post('/download', async (req, res, next) => {
   const ics = await retrieveICS(url, {
     logger: appLogger,
+    crawl,
   })
   res

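For context, the change above assumes the crawler service exports a single async function with the signature crawl(url, { logger }) that resolves with the page's HTML, which is how the retriever calls it in the second file below. A minimal sketch of such a module, with node-fetch chosen purely for illustration:

// Hypothetical sketch of ./services/crawler, inferred from how it is called in
// this commit: an async function that fetches a URL and resolves with raw HTML.
// node-fetch is an assumption for illustration; any HTTP client would do.
const fetch = require('node-fetch')

const crawl = async (url, { logger } = {}) => {
  if (logger) logger.info(`Crawling ${url}`)

  const response = await fetch(url)

  if (!response.ok) {
    throw new Error(`Failed to crawl ${url}: HTTP ${response.status}`)
  }

  return response.text()
}

module.exports = crawl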

@@ -1,10 +1,9 @@
-const crawl = require('./crawler')
 const parseUsingLDJSONData = require('./ldjson-parser')
 const parseUsingDOM = require('./dom-parser')
 const generateICS = require('./ics-generator')
 const { createParserError, getNormalizedUrl } = require('../utils')
-const retrieveICS = async (URLparameter, { logger }) => {
+const retrieveICS = async (URLparameter, { logger, crawl }) => {
   const url = getNormalizedUrl(URLparameter)
   const html = await crawl(url, { logger })
   const LDJSONEventData = parseUsingLDJSONData(html, { logger })
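A note on the payoff: with crawl passed in as an option instead of required directly, the retriever can be unit-tested without touching the network. A hypothetical Jest test along these lines; the fixture HTML, require path, and logger shape are invented for illustration, and the fixture is too minimal to guarantee a parsable event, so only the stubbed call is asserted:

// Hypothetical Jest test: the injected crawl is replaced with a stub so the
// retriever never performs a real HTTP request.
const retrieveICS = require('./services/ics-retriever')

test('retrieveICS crawls via the injected crawler', async () => {
  const fakeHtml = '<script type="application/ld+json">{"@type":"Event"}</script>'
  const crawl = jest.fn().mockResolvedValue(fakeHtml)
  const logger = { info: jest.fn(), warn: jest.fn(), error: jest.fn() }

  // The minimal fixture may not parse into a full event, so tolerate a rejection
  // and only verify that the stub received the (normalized) URL and the logger.
  await retrieveICS('https://example.com/event', { logger, crawl }).catch(() => {})

  expect(crawl).toHaveBeenCalledWith(expect.any(String), { logger })
})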