Update save-results.js

Aleksandr Statciuk 2022-02-07 06:51:59 +03:00
parent c7dc04e1bb
commit c2b557c34c
3 changed files with 45 additions and 33 deletions

scripts/commands/save-results.js

@@ -2,48 +2,53 @@ const _ = require('lodash')
 const statuses = require('../data/statuses')
 const { db, store, parser, file, logger } = require('../core')
 
-let streams = []
-let results = {}
-const origins = {}
 const items = []
-const LOGS_PATH = process.env.LOGS_PATH || 'scripts/logs/load-streams'
+const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs/load-cluster'
 
 async function main() {
-  await loadDatabase()
-  await loadResults()
-  await findStreamOrigins()
-  await updateStreams()
-  await updateDatabase()
+  let streams = await loadStreams()
+  const results = await loadResults()
+  const origins = await findOrigins(results)
+  streams = await updateStreams(streams, results, origins)
+  await updateDatabase(streams)
 }
 
 main()
 
-async function loadDatabase() {
-  logger.info('loading database...')
-  streams = await db.find({})
+async function loadStreams() {
+  logger.info('loading streams...')
+  await db.streams.load()
+  const streams = await db.streams.find({})
 
   logger.info(`found ${streams.length} streams`)
+
+  return streams
 }
 
 async function loadResults() {
-  logger.info('loading results from logs/load-streams...')
+  logger.info('loading results from logs...')
 
-  const files = await file.list(`${LOGS_PATH}/cluster_*.log`)
+  const results = {}
+  const files = await file.list(`${LOGS_DIR}/cluster_*.log`)
   for (const filepath of files) {
     const parsed = await parser.parseLogs(filepath)
-    for (const result of parsed) {
-      results[result._id] = result
+    for (const item of parsed) {
+      results[item._id] = item
     }
   }
 
   logger.info(`found ${Object.values(results).length} results`)
+
+  return results
 }
 
-async function findStreamOrigins() {
+async function findOrigins(results = {}) {
   logger.info('searching for stream origins...')
 
+  const origins = {}
   for (const { error, requests } of Object.values(results)) {
     if (error || !Array.isArray(requests) || !requests.length) continue
@@ -59,20 +64,23 @@ async function findStreamOrigins() {
   }
 
   logger.info(`found ${_.uniq(Object.values(origins)).length} origins`)
+
+  return origins
 }
 
-async function updateStreams() {
+async function updateStreams(items = [], results = {}, origins = {}) {
   logger.info('updating streams...')
 
   let updated = 0
-  for (const item of streams) {
+  const output = []
+  for (const item of items) {
     const stream = store.create(item)
     const result = results[item._id]
 
     if (result) {
       const { error, streams, requests } = result
       const resolution = parseResolution(streams)
-      const origin = findOrigin(requests)
+      const origin = findOrigin(requests, origins)
       let status = parseStatus(error)
 
       if (status) {
@@ -105,26 +113,28 @@ async function updateStreams() {
     if (stream.changed) {
       stream.set('updated', true)
-      items.push(stream.data())
+      output.push(stream.data())
       updated++
     }
   }
 
-  logger.info(`updated ${updated} items`)
+  logger.info(`updated ${updated} streams`)
+
+  return output
 }
 
-async function updateDatabase() {
+async function updateDatabase(streams = []) {
  logger.info('updating database...')
 
-  for (const item of items) {
-    await db.update({ _id: item._id }, item)
+  for (const stream of streams) {
+    await db.streams.update({ _id: stream._id }, stream)
   }
 
-  db.compact()
+  db.streams.compact()
 
   logger.info('done')
 }
 
-function findOrigin(requests) {
+function findOrigin(requests = [], origins = {}) {
   if (origins && Array.isArray(requests)) {
     requests = requests.map(r => r.url.replace(/(^\w+:|^)/, ''))
     for (const url of requests) {
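
The change above replaces module-level state (streams, results, origins, items) with values passed explicitly between the pipeline steps. A minimal, self-contained sketch of that pattern, with stubbed steps; the real script's db, parser, and logger wiring is assumed and not reproduced here:

// sketch.js: each step takes its inputs as arguments and returns its output,
// so main() reads as a pipeline and nothing relies on shared globals.
async function loadStreams() {
  return [{ _id: 'a' }, { _id: 'b' }] // stub for db.streams.find({})
}

async function loadResults() {
  return { a: { error: null } } // stub for parsed cluster_*.log files
}

async function findOrigins(results = {}) {
  return {} // stub for the origin lookup over results
}

async function updateStreams(items = [], results = {}, origins = {}) {
  return items.filter(item => results[item._id]) // keep only streams with a result
}

async function updateDatabase(streams = []) {
  console.log(`would update ${streams.length} streams`)
}

async function main() {
  let streams = await loadStreams()
  const results = await loadResults()
  const origins = await findOrigins(results)
  streams = await updateStreams(streams, results, origins)
  await updateDatabase(streams)
}

main()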

save-results test (file path not shown in this view)

@@ -4,19 +4,21 @@ const { execSync } = require('child_process')
 beforeEach(() => {
   fs.emptyDirSync('tests/__data__/temp')
-  fs.copyFileSync('tests/__data__/input/save-results.streams.db', 'tests/__data__/temp/streams.db')
+  fs.copyFileSync(
+    'tests/__data__/input/database/save-results.streams.db',
+    'tests/__data__/temp/streams.db'
+  )
 
   const stdout = execSync(
-    'DB_FILEPATH=tests/__data__/temp/streams.db LOGS_PATH=tests/__data__/input/logs/load-streams node scripts/commands/save-results.js',
+    'DB_DIR=tests/__data__/temp LOGS_DIR=tests/__data__/input/logs/load-cluster node scripts/commands/save-results.js',
     { encoding: 'utf8' }
   )
 })
 
 it('can save results', () => {
-  const output = content('tests/__data__/temp/streams.db')
-  const expected = content('tests/__data__/expected/save-results.streams.db')
-
-  expect(output).toEqual(expected)
+  expect(content('tests/__data__/temp/streams.db')).toEqual(
+    content('tests/__data__/expected/save-results.streams.db')
+  )
 })
 
 function content(filepath) {
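
The diff cuts off at content(); the helper's body is not shown. A hypothetical sketch of what such a helper typically looks like (an assumption, not the repository's code): NeDB-style .db files store one JSON document per line, so parsing each line lets the test compare documents rather than raw bytes.

// Hypothetical content() helper; the real implementation is truncated above.
const fs = require('fs-extra')

function content(filepath) {
  return fs
    .readFileSync(filepath, { encoding: 'utf8' })
    .split('\n')
    .filter(Boolean) // drop empty lines, including the trailing newline
    .map(line => JSON.parse(line))
}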