2022-02-12 23:38:07 +01:00
|
|
|
const { db, store, parser, file, logger } = require('../../core')
|
|
|
|
const _ = require('lodash')
|
|
|
|
|
|
|
|
// Directory holding the cluster check logs (cluster_*.log); overridable via env.
const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs/cluster/load'
|
|
|
|
|
|
|
|
/**
 * Entry point: loads streams from the database, the latest cluster check
 * results and the origin map derived from them, then merges everything
 * back into the stream records.
 *
 * Order matters: `loadOrigins` consumes `results`, and `updateStreams`
 * needs all three data sets.
 */
async function main() {
  const streams = await loadStreams()
  const results = await loadResults()
  const origins = await loadOrigins(results)

  await updateStreams(streams, results, origins)
}

main().catch(err => {
  // Previously `main()` was a floating promise: any rejection became an
  // unhandled promise rejection and the script could "succeed" silently.
  console.error(err)
  process.exit(1)
})
|
|
|
|
|
2022-02-14 01:17:29 +01:00
|
|
|
/**
 * Merges check results and resolved origins into the stream records.
 *
 * For each stream that has a check result: stores the parsed status, the
 * best video variant's width/height/bitrate, and — when the request chain
 * resolves to a known origin — rewrites the stream URL to that origin.
 * A stream whose (possibly rewritten) URL was already written during this
 * run is deleted as a duplicate.
 *
 * @param {Object[]} items   stream documents from the database
 * @param {Object}   results check results keyed by stream `_id`
 * @param {Object}   origins map of protocol-less request URL -> origin URL
 */
async function updateStreams(items = [], results = {}, origins = {}) {
  logger.info('updating streams...')

  // Tracks URLs already written this run, to detect duplicates.
  // NOTE(review): only *updated* streams are recorded here, so duplicates
  // of unchanged streams are not removed — confirm this is intended.
  let buffer = {}
  let updated = 0
  let removed = 0
  for (const item of items) {
    const stream = store.create(item)

    const result = results[item._id]
    if (result) {
      const status = parseStatus(result.error)
      stream.set('status', { status })

      // Media info is only available when ffprobe reported any streams.
      if (result.streams.length) {
        const { width, height, bitrate } = parseMediaInfo(result.streams)
        stream.set('width', { width })
        stream.set('height', { height })
        stream.set('bitrate', { bitrate })
      }

      // Rewrite the URL to the origin server when the redirect chain
      // maps back to one (see loadOrigins/findOrigin).
      if (result.requests.length) {
        const origin = findOrigin(result.requests, origins)
        if (origin) {
          stream.set('url', { url: origin })
        }
      }
    }

    if (buffer[stream.get('url')]) {
      // Same URL already written this run -> drop this record as a duplicate.
      await db.streams.remove({ _id: stream.get('_id') })
      removed++
    } else if (stream.changed) {
      await db.streams.update({ _id: stream.get('_id') }, stream.data())
      buffer[stream.get('url')] = true
      updated++
    }
  }

  // NOTE(review): not awaited — presumably fire-and-forget datafile
  // compaction; confirm db.streams.compact() needs no await.
  db.streams.compact()

  logger.info(`updated ${updated} streams`)
  logger.info(`removed ${removed} duplicates`)
  logger.info('done')
}
|
|
|
|
|
2022-02-12 23:38:07 +01:00
|
|
|
/**
 * Loads every stream record from the database.
 *
 * @returns {Promise<Object[]>} all stream documents
 */
async function loadStreams() {
  logger.info('loading streams...')

  await db.streams.load()

  const found = await db.streams.find({})
  logger.info(`found ${found.length} streams`)

  return found
}
|
|
|
|
|
|
|
|
/**
 * Collects per-stream check results from the cluster log files.
 *
 * Entries are keyed by stream `_id`; a later entry with the same id
 * overwrites an earlier one, so each stream keeps its latest result.
 *
 * @returns {Promise<Object>} results keyed by stream `_id`
 */
async function loadResults() {
  logger.info('loading check results...')

  const byId = {}
  const logFiles = await file.list(`${LOGS_DIR}/cluster_*.log`)
  for (const logFile of logFiles) {
    const entries = await parser.parseLogs(logFile)
    entries.forEach(entry => {
      byId[entry._id] = entry
    })
  }

  logger.info(`found ${Object.values(byId).length} results`)

  return byId
}
|
|
|
|
|
2022-02-14 01:17:29 +01:00
|
|
|
/**
 * Builds a map from request URLs back to the origin that served them.
 *
 * For every successful check, the first request in the chain is treated as
 * the origin; each subsequent request URL on the same host (protocol
 * stripped) is mapped to that origin's href. The first mapping for a key
 * wins.
 *
 * @param {Object} results check results keyed by stream `_id`
 * @returns {Promise<Object>} map of protocol-less URL -> origin href
 */
async function loadOrigins(results = {}) {
  logger.info('loading origins...')

  const origins = {}
  for (const { error, requests } of Object.values(results)) {
    if (error || !Array.isArray(requests) || !requests.length) continue

    // BUGFIX: the previous version used requests.shift(), which mutated the
    // caller-owned result.requests array — updateStreams() still needs it
    // intact when it later calls findOrigin(result.requests, origins).
    const [first, ...redirects] = requests
    const origin = new URL(first.url)
    for (const request of redirects) {
      const curr = new URL(request.url)
      const key = curr.href.replace(/(^\w+:|^)/, '')
      if (!origins[key] && curr.host === origin.host) {
        origins[key] = origin.href
      }
    }
  }

  logger.info(`found ${_.uniq(Object.values(origins)).length} origins`)

  return origins
}
|
|
|
|
|
|
|
|
/**
 * Resolves the origin URL for a chain of requests.
 *
 * Each request URL is stripped of its protocol and looked up in the
 * origins map; the first match is returned.
 *
 * @param {Object[]} requests request objects carrying a `url` property
 * @param {Object} origins map of protocol-less URL -> origin URL
 * @returns {?string} the matching origin URL, or null when none matches
 */
function findOrigin(requests = [], origins = {}) {
  if (!origins || !Array.isArray(requests)) return null

  const keys = requests.map(({ url }) => url.replace(/(^\w+:|^)/, ''))
  for (const key of keys) {
    const origin = origins[key]
    if (origin) {
      return origin
    }
  }

  return null
}
|
|
|
|
|
2022-02-14 01:17:29 +01:00
|
|
|
/**
 * Picks the best video variant from ffprobe stream descriptors.
 *
 * Variants are ranked by height, then by `tags.variant_bitrate` (both
 * descending); the top variant's media info is returned.
 *
 * @param {Object[]} streams ffprobe stream descriptors
 * @returns {Object} `{ width, height, bitrate }`, or `{}` when there is no video stream
 */
function parseMediaInfo(streams) {
  // HLS variant bitrate lives in tags.variant_bitrate; default to 0 when absent.
  // FIX: parseInt now gets an explicit radix (was missing in two places).
  const bitrateOf = s =>
    s.tags && s.tags.variant_bitrate ? parseInt(s.tags.variant_bitrate, 10) : 0

  // Native Array#sort is stable (ES2019+), matching the previous
  // lodash orderBy(['height', bitrate], ['desc', 'desc']) behavior.
  const video = streams
    .filter(s => s.codec_type === 'video')
    .sort((a, b) => (b.height || 0) - (a.height || 0) || bitrateOf(b) - bitrateOf(a))

  const best = video[0]
  if (best) {
    return { width: best.width, height: best.height, bitrate: bitrateOf(best) }
  }

  return {}
}
|
|
|
|
|
2022-02-13 08:33:16 +01:00
|
|
|
/**
 * Maps a checker error message to a stream status label.
 *
 * @param {?string} error the error reported by the checker, if any
 * @returns {string} 'online' | 'timeout' | 'blocked' | 'error'
 */
function parseStatus(error) {
  if (!error) return 'online'

  const statusByError = {
    'Operation timed out': 'timeout',
    'Server returned 403 Forbidden (access denied)': 'blocked'
  }

  return statusByError[error] || 'error'
}
|