Update load-streams.js

This commit is contained in:
Aleksandr Statciuk 2022-02-05 03:19:31 +03:00
parent d4ccef3719
commit 9a4aea7949
5 changed files with 46 additions and 54 deletions

View File

@ -42,7 +42,7 @@ jobs:
with: with:
node-version: '14' node-version: '14'
- run: npm install - run: npm install
- run: node scripts/commands/check-streams.js --cluster-id=${{ matrix.cluster_id }} - run: node scripts/commands/load-streams.js --cluster-id=${{ matrix.cluster_id }}
- uses: actions/upload-artifact@v2 - uses: actions/upload-artifact@v2
with: with:
name: logs name: logs

View File

@ -18,20 +18,20 @@ const config = {
const LOGS_PATH = process.env.LOGS_PATH || 'scripts/logs' const LOGS_PATH = process.env.LOGS_PATH || 'scripts/logs'
async function main() { async function main() {
logger.info('Starting...') logger.info('starting...')
logger.info(`Timeout: ${options.timeout}ms`) logger.info(`timeout: ${options.timeout}ms`)
logger.info(`Delay: ${options.delay}ms`) logger.info(`delay: ${options.delay}ms`)
timer.start() timer.start()
const clusterLog = `${LOGS_PATH}/check-streams/cluster_${options.clusterId}.log` const clusterLog = `${LOGS_PATH}/load-streams/cluster_${options.clusterId}.log`
logger.info(`Loading cluster: ${options.clusterId}`) logger.info(`loading cluster: ${options.clusterId}`)
logger.info(`Creating '${clusterLog}'...`) logger.info(`creating '${clusterLog}'...`)
await file.create(clusterLog) await file.create(clusterLog)
const items = await db.find({ cluster_id: options.clusterId }) const items = await db.find({ cluster_id: options.clusterId })
const total = items.length const total = items.length
logger.info(`Found ${total} links`) logger.info(`found ${total} links`)
logger.info('Checking...') logger.info('checking...')
const results = {} const results = {}
for (const [i, item] of items.entries()) { for (const [i, item] of items.entries()) {
const message = `[${i + 1}/${total}] ${item.filepath}: ${item.url}` const message = `[${i + 1}/${total}] ${item.filepath}: ${item.url}`
@ -44,7 +44,7 @@ async function main() {
await file.append(clusterLog, JSON.stringify(result) + '\n') await file.append(clusterLog, JSON.stringify(result) + '\n')
} }
logger.info(`Done in ${timer.format('HH[h] mm[m] ss[s]')}`) logger.info(`done in ${timer.format('HH[h] mm[m] ss[s]')}`)
} }
main() main()

View File

@ -0,0 +1,2 @@
{"_id":"2ST8btby3mmsgPF0","url":"http://46.46.143.222:1935/live/mp4:ldpr.stream/playlist.m3u8","http":{"referrer":"","user-agent":""},"error":"Operation timed out","streams":[],"requests":[]}
{"_id":"I6cjG2xCBRFFP4sz","url":"https://iptv-all.lanesh4d0w.repl.co/andorra/atv","http":{"referrer":"","user-agent":""},"error":"Operation timed out","streams":[],"requests":[]}

View File

@ -1,44 +0,0 @@
const fs = require('fs')
const path = require('path')
const { execSync } = require('child_process')
// Reset the workspace before each test: wipe and recreate the output dir,
// then seed the temp dir with the input database fixture.
beforeEach(() => {
  // fs.rmdirSync(dir, { recursive: true }) is deprecated since Node 14.14;
  // fs.rmSync with force:true keeps the old "missing dir is fine" behavior.
  fs.rmSync('tests/__data__/output', { recursive: true, force: true })
  fs.mkdirSync('tests/__data__/output')
  fs.copyFileSync('tests/__data__/input/test.db', 'tests/__data__/temp/test.db')
})
// Clean up after each test: wipe and recreate the temp dir so the next test
// starts from an empty scratch area.
afterEach(() => {
  // fs.rmdirSync(dir, { recursive: true }) is deprecated since Node 14.14;
  // fs.rmSync with force:true keeps the old "missing dir is fine" behavior.
  fs.rmSync('tests/__data__/temp', { recursive: true, force: true })
  fs.mkdirSync('tests/__data__/temp')
})
// Run check-streams against cluster 1 with a 1ms timeout so both fixture URLs
// fail, then verify the per-cluster log records both timeouts in order.
it('return results if stream with error', () => {
  // We assert on the log file the script writes, not on its stdout, so the
  // return value of execSync is deliberately discarded (it was previously
  // captured into an unused variable).
  execSync(
    'DB_FILEPATH=tests/__data__/temp/test.db LOGS_PATH=tests/__data__/output/logs node scripts/commands/check-streams.js --cluster-id=1 --timeout=1',
    { encoding: 'utf8' }
  )
  const logs = fs.readFileSync(
    path.resolve('tests/__data__/output/logs/check-streams/cluster_1.log'),
    {
      encoding: 'utf8'
    }
  )
  // The log is JSON-lines: one serialized result object per checked stream.
  const lines = logs.split('\n')
  expect(JSON.parse(lines[0])).toMatchObject({
    _id: '2ST8btby3mmsgPF0',
    url: 'http://46.46.143.222:1935/live/mp4:ldpr.stream/playlist.m3u8',
    http: { referrer: '', 'user-agent': '' },
    error: 'Operation timed out',
    streams: [],
    requests: []
  })
  expect(JSON.parse(lines[1])).toMatchObject({
    _id: 'I6cjG2xCBRFFP4sz',
    url: 'https://iptv-all.lanesh4d0w.repl.co/andorra/atv',
    http: { referrer: '', 'user-agent': '' },
    error: 'Operation timed out',
    streams: [],
    requests: []
  })
})

View File

@ -0,0 +1,34 @@
const fs = require('fs-extra')
const path = require('path')
const { execSync } = require('child_process')
// Prepare clean output/temp dirs, seed the channels fixture, and run the
// load-streams command once; the tests then inspect the log it produced.
beforeEach(() => {
  fs.emptyDirSync('tests/__data__/output')
  fs.emptyDirSync('tests/__data__/temp')
  fs.copyFileSync('tests/__data__/input/channels.db', 'tests/__data__/temp/channels.db')
  // execSync is called only for its side effect (writing the cluster log
  // under tests/__data__/output/logs); its stdout was previously captured
  // into an unused variable, so the return value is now discarded.
  execSync(
    'DB_FILEPATH=tests/__data__/temp/channels.db LOGS_PATH=tests/__data__/output/logs node scripts/commands/load-streams.js --cluster-id=1 --timeout=1',
    { encoding: 'utf8' }
  )
})
// The cluster log written by the command must match the checked-in
// expected log, record for record.
it('return results', () => {
  const actual = content('tests/__data__/output/logs/load-streams/cluster_1.log')
  const wanted = content('tests/__data__/expected/logs/load-streams/cluster_1.log')
  expect(actual).toEqual(wanted)
})
// Read a JSON-lines log file and return its records as an array of objects.
// Empty lines (including the trailing newline) are skipped.
function content(filepath) {
  const raw = fs.readFileSync(path.resolve(filepath), {
    encoding: 'utf8'
  })
  const records = []
  for (const line of raw.split('\n')) {
    if (!line) continue
    records.push(JSON.parse(line))
  }
  return records
}