2022-02-05 00:48:48 +01:00
|
|
|
const { db, file, parser, store, logger, cid } = require('../core')
|
2021-12-12 05:10:03 +01:00
|
|
|
const { program } = require('commander')
|
|
|
|
const _ = require('lodash')
|
|
|
|
|
|
|
|
// CLI options: `--max-clusters` caps how many cluster buckets streams are
// split into (parsed as a number, default 200); `--input-dir` is the root
// directory scanned for playlists (default 'channels').
const options = program
  .option('--max-clusters <max-clusters>', 'Set maximum number of clusters', parser.parseNumber, 200)
  .option('--input-dir <input-dir>', 'Set path to input directory', 'channels')
  .parse(process.argv)
  .opts()
|
|
|
|
|
|
|
|
/**
 * Entry point: gathers streams from every playlist under the input
 * directory and persists them to the database.
 */
async function main() {
  logger.info('starting...')
  logger.info(`number of clusters: ${options.maxClusters}`)

  const streams = await findStreams()
  await saveToDatabase(streams)

  logger.info('done')
}
|
|
|
|
|
|
|
|
// Run the entry point. The promise was previously left floating, so a
// rejection would surface only as an unhandled-rejection warning (or a
// crash, depending on the Node version); catch it explicitly and signal
// failure through the exit code.
main().catch((err) => {
  console.error(err)
  process.exitCode = 1
})
2022-02-07 04:04:41 +01:00
|
|
|
/**
 * Scans the input directory for *.m3u playlists and flattens every
 * parsed item into a single list, tagging each item with the path of
 * the playlist file it came from.
 *
 * @returns {Promise<Object[]>} all parsed stream items
 */
async function findStreams() {
  logger.info(`looking for streams...`)

  await db.streams.load()

  const playlists = await file.list(`${options.inputDir}/**/*.m3u`)
  const found = []
  for (const playlistPath of playlists) {
    const parsed = await parser.parsePlaylist(playlistPath)
    parsed.forEach((item) => {
      // Remember the source playlist; used later for filepath lookups
      // and channel-id generation.
      item.filepath = playlistPath
      found.push(item)
    })
  }

  logger.info(`found ${found.length} streams`)

  return found
}
|
|
|
|
|
2022-02-07 04:04:41 +01:00
|
|
|
/**
 * Resets the streams table and inserts every stream, assigning each to a
 * cluster. Streams are shuffled first so clusters get an even mix, then
 * partitioned into at most `options.maxClusters` chunks.
 *
 * @param {Object[]} streams parsed playlist items to persist
 */
async function saveToDatabase(streams = []) {
  logger.info('saving to the database...')

  await db.streams.reset()

  const clusters = split(_.shuffle(streams), options.maxClusters)
  for (const [index, cluster] of clusters.entries()) {
    for (const item of cluster) {
      const record = store.create()
      record.set('channel_id', { channel_id: item.tvg.id })
      record.set('channel_name', { title: item.name })
      record.set('filepath', { filepath: item.filepath })
      record.set('resolution', { title: item.name })
      record.set('status', { title: item.name })
      record.set('url', { url: item.url })
      record.set('http', { http: item.http })
      // NOTE: order matters — `is_broken` is derived from the status
      // field set a few lines above.
      record.set('is_broken', { status: record.get('status') })
      record.set('updated', { updated: false })
      record.set('cluster_id', { cluster_id: index + 1 })

      // Playlists without a tvg id get a generated channel id and are
      // flagged as updated.
      if (!record.get('channel_id')) {
        const channel_id = cid.generate(item.name, item.filepath)
        record.set('channel_id', { channel_id })
        record.set('updated', { updated: true })
      }

      await db.streams.insert(record.data())
    }
  }
}
|
|
|
|
|
|
|
|
/**
 * Splits an array into `n` contiguous chunks whose sizes differ by at
 * most one (earlier chunks are the larger ones). When `n` exceeds the
 * array length the trailing chunks are empty, so the result always has
 * exactly `n` entries.
 *
 * Fix: the previous version consumed `arr` in place via `splice`,
 * mutating the caller's array; this version reads via `slice` and
 * leaves the input untouched, with identical chunking.
 *
 * @param {Array} arr - items to partition (not modified)
 * @param {number} n - number of chunks to produce
 * @returns {Array[]} array of `n` chunks
 */
function split(arr, n) {
  const result = []
  let start = 0
  for (let i = n; i > 0; i--) {
    // Same sizing rule as the original: ceil(remaining / chunks left).
    const size = Math.ceil((arr.length - start) / i)
    result.push(arr.slice(start, start + size))
    start += size
  }
  return result
}
|