use crawling logic as a service for ICS retrieval

parent 0eda018113
commit b3321b2f90
@@ -5,6 +5,7 @@ const favicon = require('serve-favicon')
 const rateLimit = require('express-rate-limit')
 
 const retrieveICS = require('./services/ics-retriever')
+const crawl = require('./services/crawler')
 const {
   genericErrorHandler,
   checkURLParameter,
@@ -83,6 +84,7 @@ app.post('/download', async (req, res, next) => {
 
   const ics = await retrieveICS(url, {
     logger: appLogger,
+    crawl,
   })
 
   res

@@ -1,10 +1,9 @@
-const crawl = require('./crawler')
 const parseUsingLDJSONData = require('./ldjson-parser')
 const parseUsingDOM = require('./dom-parser')
 const generateICS = require('./ics-generator')
 const { createParserError, getNormalizedUrl } = require('../utils')
 
-const retrieveICS = async (URLparameter, { logger }) => {
+const retrieveICS = async (URLparameter, { logger, crawl }) => {
   const url = getNormalizedUrl(URLparameter)
   const html = await crawl(url, { logger })
   const LDJSONEventData = parseUsingLDJSONData(html, { logger })