Update create-database.js

parent 23aef8722d
commit 091d43c4f7

@@ -13,44 +13,45 @@ const options = program
   .parse(process.argv)
   .opts()
 
-const links = []
 
 async function main() {
   logger.info('starting...')
   logger.info(`number of clusters: ${options.maxClusters}`)
 
-  await loadChannels()
-  await saveToDatabase()
+  await saveToDatabase(await findStreams())
 
   logger.info('done')
 }
 
 main()
 
-async function loadChannels() {
-  logger.info(`loading links...`)
+async function findStreams() {
+  logger.info(`looking for streams...`)
 
+  await db.streams.load()
   const files = await file.list(`${options.inputDir}/**/*.m3u`)
+  const streams = []
   for (const filepath of files) {
     const items = await parser.parsePlaylist(filepath)
     for (const item of items) {
       item.filepath = filepath
-      links.push(item)
+      streams.push(item)
     }
   }
-  logger.info(`found ${links.length} links`)
+  logger.info(`found ${streams.length} streams`)
 
+  return streams
 }
 
-async function saveToDatabase() {
+async function saveToDatabase(streams = []) {
   logger.info('saving to the database...')
 
-  await db.reset()
-  const chunks = split(_.shuffle(links), options.maxClusters)
+  await db.streams.reset()
+  const chunks = split(_.shuffle(streams), options.maxClusters)
   for (const [i, chunk] of chunks.entries()) {
     for (const item of chunk) {
       const stream = store.create()
-      stream.set('id', { id: item.tvg.id })
-      stream.set('title', { title: item.name })
+      stream.set('channel_id', { channel_id: item.tvg.id })
+      stream.set('channel_name', { title: item.name })
       stream.set('filepath', { filepath: item.filepath })
      stream.set('resolution', { title: item.name })
      stream.set('status', { title: item.name })
@@ -60,14 +61,14 @@ async function saveToDatabase() {
       stream.set('updated', { updated: false })
       stream.set('cluster_id', { cluster_id: i + 1 })
 
-      if (!stream.get('id')) {
-        const id = cid.generate(item.name, item.filepath)
+      if (!stream.get('channel_id')) {
+        const channel_id = cid.generate(item.name, item.filepath)
 
-        stream.set('id', { id })
+        stream.set('channel_id', { channel_id })
         stream.set('updated', { updated: true })
       }
 
-      await db.insert(stream.data())
+      await db.streams.insert(stream.data())
     }
   }
 }
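For orientation, the stream-loading half of the script reads roughly as follows once the hunks above are applied. This is a sketch assembled from the context and `+` lines only; `db`, `file`, `parser`, `logger`, and `options` come from imports and setup at the top of the file that are outside these hunks and are assumed here.

```js
// Sketch of the post-change flow (assembled from the diff above; imports assumed).
async function findStreams() {
  logger.info(`looking for streams...`)

  await db.streams.load()
  const files = await file.list(`${options.inputDir}/**/*.m3u`)
  const streams = []
  for (const filepath of files) {
    const items = await parser.parsePlaylist(filepath)
    for (const item of items) {
      item.filepath = filepath // remember which playlist the stream came from
      streams.push(item)
    }
  }
  logger.info(`found ${streams.length} streams`)

  return streams
}

// main() now threads the collected streams straight into the save step:
// await saveToDatabase(await findStreams())
```

Passing the array explicitly (with a `streams = []` default parameter) replaces the deleted module-level `links` accumulator, so `saveToDatabase()` no longer depends on shared mutable state.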

@@ -2,4 +2,4 @@ exports.is_broken = require('./is_broken')
 exports.resolution = require('./resolution')
 exports.status = require('./status')
 exports.url = require('./url')
-exports.title = require('./title')
+exports.channel_name = require('./channel_name')
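The renamed setter mirrors the `stream.set('channel_name', { title: item.name })` call in the first hunk. Its implementation is not part of this diff; a minimal hypothetical sketch of such a module, assuming `stream.set(field, payload)` routes the payload through the matching entry in this setter index, could look like this:

```js
// Hypothetical sketch only — the real ./channel_name module is not shown in this diff.
// Assumption: stream.set('channel_name', { title: item.name }) passes { title } here.
module.exports = function ({ title = '' }) {
  // Normalize the playlist title before it is stored as channel_name.
  return title.trim()
}
```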

@@ -6,14 +6,14 @@ beforeEach(() => {
   fs.emptyDirSync('tests/__data__/output')
 
   const stdout = execSync(
-    'DB_FILEPATH=tests/__data__/output/streams.db node scripts/commands/create-database.js --input-dir=tests/__data__/input/channels --max-clusters=1',
+    'DB_DIR=tests/__data__/output/database node scripts/commands/create-database.js --input-dir=tests/__data__/input/channels --max-clusters=1',
     { encoding: 'utf8' }
   )
 })
 
 it('can create database', () => {
-  let output = content('tests/__data__/output/streams.db')
-  let expected = content('tests/__data__/expected/streams.db')
+  let output = content('tests/__data__/output/database/streams.db')
+  let expected = content('tests/__data__/expected/database/streams.db')
 
   output = output.map(i => {
     i._id = null
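The `content()` helper used in these assertions is defined earlier in the test file and is not part of this diff. A minimal hypothetical sketch of what such a helper might do, given that each record in `streams.db` carries an auto-generated `_id` that the test nulls out before comparing output against the expected fixture:

```js
// Hypothetical sketch of the content() helper (the real one is defined earlier in
// the test file and not shown here). Assumption: the .db file holds one JSON
// record per line.
const fs = require('fs')

function content(filepath) {
  return fs
    .readFileSync(filepath, { encoding: 'utf8' })
    .split('\n')
    .filter(Boolean)
    .map(line => JSON.parse(line))
}
```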