Create scripts/commands
This commit is contained in:
parent
b775228d1e
commit
213da67762
|
@ -0,0 +1,50 @@
|
|||
const { program } = require('commander')
|
||||
const { db, logger, timer, checker, store, file, parser } = require('../core')
|
||||
|
||||
const options = program
|
||||
.requiredOption('-c, --cluster-id <cluster-id>', 'The ID of cluster to load', parser.parseNumber)
|
||||
.option('-t, --timeout <timeout>', 'Set timeout for each request', parser.parseNumber, 60000)
|
||||
.option('-d, --delay <delay>', 'Set delay for each request', parser.parseNumber, 0)
|
||||
.option('--debug', 'Enable debug mode')
|
||||
.parse(process.argv)
|
||||
.opts()
|
||||
|
||||
const config = {
|
||||
timeout: options.timeout,
|
||||
delay: options.delay,
|
||||
debug: options.debug
|
||||
}
|
||||
|
||||
const LOGS_PATH = process.env.LOGS_PATH || 'scripts/logs'
|
||||
|
||||
/**
 * Entry point for the check-streams script: loads every stream belonging to
 * the requested cluster, checks each one sequentially, and appends the raw
 * check result (one JSON object per line) to the cluster's log file.
 */
async function main() {
  logger.info('Starting...')
  logger.info(`Timeout: ${options.timeout}ms`)
  logger.info(`Delay: ${options.delay}ms`)
  timer.start()

  const clusterLog = `${LOGS_PATH}/check-streams/cluster_${options.clusterId}.log`
  logger.info(`Loading cluster: ${options.clusterId}`)
  logger.info(`Creating '${clusterLog}'...`)
  await file.create(clusterLog)
  const items = await db.find({ cluster_id: options.clusterId })
  const total = items.length
  logger.info(`Found ${total} links`)

  logger.info('Checking...')
  // NOTE: the original declared `const results = {}` here but never used it;
  // results are persisted to the log file instead, so it has been removed.
  for (const [i, item] of items.entries()) {
    const message = `[${i + 1}/${total}] ${item.filepath}: ${item.url}`
    const result = await checker.check(item, config)
    if (!result.error) {
      logger.info(message)
    } else {
      logger.info(`${message} (${result.error})`)
    }
    // One JSON document per line so the log can be parsed back later
    // (see the update-database script's loadCheckResults).
    await file.append(clusterLog, JSON.stringify(result) + '\n')
  }

  logger.info(`Done in ${timer.format('HH[h] mm[m] ss[s]')}`)
}

main()
|
|
@ -0,0 +1,14 @@
|
|||
const { db, logger } = require('../core')
|
||||
|
||||
// Removes every stream whose last check ended with a 'timeout' or 'offline'
// status code, then compacts the datastore file.
async function main() {
  // `multi: true` deletes all matching documents, not just the first.
  const removed = await db.remove(
    { 'status.code': { $in: ['timeout', 'offline'] } },
    { multi: true }
  )

  // NOTE(review): compact() is not awaited — presumably fire-and-forget;
  // confirm the datastore flushes before the process exits.
  db.compact()

  logger.info(`Removed ${removed} links`)
}

main()
|
|
@ -0,0 +1,104 @@
|
|||
const { db, file, parser, store, logger } = require('../core')
|
||||
const transliteration = require('transliteration')
|
||||
const { program } = require('commander')
|
||||
const _ = require('lodash')
|
||||
|
||||
const options = program
|
||||
.option(
|
||||
'--max-clusters <max-clusters>',
|
||||
'Set maximum number of clusters',
|
||||
parser.parseNumber,
|
||||
200
|
||||
)
|
||||
.option('--input-dir <input-dir>', 'Set path to input directory', 'channels')
|
||||
.parse(process.argv)
|
||||
.opts()
|
||||
|
||||
const links = []
|
||||
|
||||
async function main() {
|
||||
logger.info('Starting...')
|
||||
logger.info(`Number of clusters: ${options.maxClusters}`)
|
||||
|
||||
await loadChannels()
|
||||
await saveToDatabase()
|
||||
|
||||
logger.info('Done')
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
/**
 * Reads every .m3u playlist under the input directory and collects its
 * entries into the module-level `links` array, tagging each entry with the
 * playlist file it came from.
 */
async function loadChannels() {
  logger.info(`Loading links...`)

  const playlists = await file.list(`${options.inputDir}/**/*.m3u`)
  for (const playlistPath of playlists) {
    const entries = await parser.parsePlaylist(playlistPath)
    entries.forEach(entry => {
      entry.filepath = playlistPath
      links.push(entry)
    })
  }
  logger.info(`Found ${links.length} links`)
}
|
||||
|
||||
// Wipes the database and re-inserts every loaded link, sharding the
// (shuffled) list into `options.maxClusters` chunks so later checks can run
// one job per cluster.
async function saveToDatabase() {
  logger.info('Saving to the database...')

  await db.reset()
  // Shuffle first so each cluster receives a random mix of sources.
  const chunks = split(_.shuffle(links), options.maxClusters)
  for (const [i, chunk] of chunks.entries()) {
    for (const item of chunk) {
      // NOTE(review): store.set(field, inputs) appears to DERIVE the field's
      // value from the given inputs rather than store them verbatim — confirm
      // against core/store. Order matters: several later fields read earlier
      // ones back via stream.get().
      const stream = store.create()
      stream.set('name', { title: item.name })
      stream.set('id', { id: item.tvg.id })
      stream.set('filepath', { filepath: item.filepath })
      // src_country is derived from the playlist path, not from tvg data.
      stream.set('src_country', { filepath: item.filepath })
      stream.set('tvg_country', { tvg_country: item.tvg.country })
      stream.set('countries', { tvg_country: item.tvg.country })
      stream.set('regions', { countries: stream.get('countries') })
      stream.set('languages', { tvg_language: item.tvg.language })
      stream.set('categories', { group_title: item.group.title })
      stream.set('tvg_url', { tvg_url: item.tvg.url })
      stream.set('guides', { tvg_url: item.tvg.url })
      stream.set('logo', { logo: item.tvg.logo })
      // Resolution and status are parsed out of the channel title.
      stream.set('resolution', { title: item.name })
      stream.set('status', { title: item.name })
      stream.set('url', { url: item.url })
      stream.set('http', { http: item.http })
      stream.set('is_nsfw', { categories: stream.get('categories') })
      stream.set('is_broken', { status: stream.get('status') })
      stream.set('updated', { updated: false })
      // Clusters are numbered from 1, matching the --cluster-id option of
      // the check-streams script.
      stream.set('cluster_id', { cluster_id: i + 1 })

      // Fall back to a generated id when the playlist carried no tvg-id.
      if (!stream.get('id')) {
        const id = generateChannelId(stream.get('name'), stream.get('src_country'))
        stream.set('id', { id })
      }

      await db.insert(stream.data())
    }
  }
}
|
||||
|
||||
/**
 * Splits an array into `n` chunks of near-equal size; earlier chunks are at
 * most one element longer than later ones, and chunks are empty once the
 * items run out (n > arr.length).
 *
 * Unlike the original splice-based version, the input array is NOT mutated.
 *
 * @param {Array} arr - Items to split (left untouched).
 * @param {number} n - Number of chunks to produce.
 * @returns {Array[]} Exactly `n` chunks in original order.
 */
function split(arr, n) {
  const result = []
  let offset = 0
  for (let i = n; i > 0; i--) {
    // Distribute the remaining items as evenly as possible across the
    // remaining i chunks.
    const size = Math.ceil((arr.length - offset) / i)
    result.push(arr.slice(offset, offset + size))
    offset += size
  }
  return result
}
|
||||
|
||||
/**
 * Builds a channel id of the form `<slug>.<countrycode>` from the channel
 * name and its source country. Returns null when either piece is missing.
 */
function generateChannelId(name, src_country) {
  if (!name || !src_country) return null

  // Transliterate to ASCII, spell out '+', then strip everything that is
  // not a letter or digit.
  const slug = transliteration
    .transliterate(name)
    .replace(/\+/gi, 'Plus')
    .replace(/[^a-z\d]+/gi, '')

  return `${slug}.${src_country.code.toLowerCase()}`
}
|
|
@ -0,0 +1,15 @@
|
|||
const { logger, db } = require('../core')
|
||||
|
||||
// Prints a GitHub Actions job matrix containing every distinct cluster_id
// in the database, so the workflow can spawn one check job per cluster.
async function main() {
  const docs = await db.find({}).sort({ cluster_id: 1 })
  // Collect cluster ids in ascending order, de-duplicated.
  const cluster_id = docs.reduce((acc, curr) => {
    if (!acc.includes(curr.cluster_id)) acc.push(curr.cluster_id)
    return acc
  }, [])

  const matrix = { cluster_id }
  // NOTE(review): the `::set-output` workflow command is deprecated by
  // GitHub Actions; consider writing to $GITHUB_OUTPUT instead.
  const output = `::set-output name=matrix::${JSON.stringify(matrix)}`
  logger.info(output)
}

main()
|
|
@ -0,0 +1,321 @@
|
|||
const { db, logger, generator, file } = require('../core')
|
||||
const _ = require('lodash')
|
||||
|
||||
let languages = []
|
||||
let countries = []
|
||||
let categories = []
|
||||
let regions = []
|
||||
|
||||
const LOGS_PATH = process.env.LOGS_PATH || 'scripts/logs'
|
||||
const PUBLIC_PATH = process.env.PUBLIC_PATH || '.gh-pages'
|
||||
|
||||
// Entry point: loads the database once, then writes every grouped playlist
// (categories/countries/languages/regions), the index.* variants, and the
// channels.json dump under PUBLIC_PATH.
async function main() {
  await setUp()

  await generateCategories()
  await generateCountries()
  await generateLanguages()
  await generateRegions()
  await generateIndex()
  await generateIndexNSFW()
  await generateIndexCategory()
  await generateIndexCountry()
  await generateIndexLanguage()
  await generateIndexRegion()

  await generateChannelsJson()
}

main()
|
||||
|
||||
async function generateCategories() {
|
||||
logger.info(`Generating categories/...`)
|
||||
|
||||
for (const category of categories) {
|
||||
const { count } = await generator.generate(
|
||||
`${PUBLIC_PATH}/categories/${category.slug}.m3u`,
|
||||
{ categories: { $elemMatch: category } },
|
||||
{ saveEmpty: true, includeNSFW: true }
|
||||
)
|
||||
|
||||
await log('categories', {
|
||||
name: category.name,
|
||||
slug: category.slug,
|
||||
count
|
||||
})
|
||||
}
|
||||
|
||||
const { count: otherCount } = await generator.generate(
|
||||
`${PUBLIC_PATH}/categories/other.m3u`,
|
||||
{ categories: { $size: 0 } },
|
||||
{ saveEmpty: true }
|
||||
)
|
||||
|
||||
await log('categories', {
|
||||
name: 'Other',
|
||||
slug: 'other',
|
||||
count: otherCount
|
||||
})
|
||||
}
|
||||
|
||||
async function generateCountries() {
|
||||
logger.info(`Generating countries/...`)
|
||||
|
||||
for (const country of countries) {
|
||||
const { count } = await generator.generate(
|
||||
`${PUBLIC_PATH}/countries/${country.code.toLowerCase()}.m3u`,
|
||||
{
|
||||
countries: { $elemMatch: country }
|
||||
}
|
||||
)
|
||||
|
||||
await log('countries', {
|
||||
name: country.name,
|
||||
code: country.code,
|
||||
count
|
||||
})
|
||||
}
|
||||
|
||||
const { count: intCount } = await generator.generate(`${PUBLIC_PATH}/countries/int.m3u`, {
|
||||
tvg_country: 'INT'
|
||||
})
|
||||
|
||||
await log('countries', {
|
||||
name: 'International',
|
||||
code: 'INT',
|
||||
count: intCount
|
||||
})
|
||||
|
||||
const { count: undefinedCount } = await generator.generate(
|
||||
`${PUBLIC_PATH}/countries/undefined.m3u`,
|
||||
{
|
||||
countries: { $size: 0 }
|
||||
}
|
||||
)
|
||||
|
||||
await log('countries', {
|
||||
name: 'Undefined',
|
||||
code: 'UNDEFINED',
|
||||
count: undefinedCount
|
||||
})
|
||||
}
|
||||
|
||||
async function generateLanguages() {
|
||||
logger.info(`Generating languages/...`)
|
||||
|
||||
for (const language of _.uniqBy(languages, 'code')) {
|
||||
const { count } = await generator.generate(`${PUBLIC_PATH}/languages/${language.code}.m3u`, {
|
||||
languages: { $elemMatch: language }
|
||||
})
|
||||
|
||||
await log('languages', {
|
||||
name: language.name,
|
||||
code: language.code,
|
||||
count
|
||||
})
|
||||
}
|
||||
|
||||
const { count: undefinedCount } = await generator.generate(
|
||||
`${PUBLIC_PATH}/languages/undefined.m3u`,
|
||||
{
|
||||
languages: { $size: 0 }
|
||||
}
|
||||
)
|
||||
|
||||
await log('languages', {
|
||||
name: 'Undefined',
|
||||
code: 'undefined',
|
||||
count: undefinedCount
|
||||
})
|
||||
}
|
||||
|
||||
async function generateRegions() {
|
||||
logger.info(`Generating regions/...`)
|
||||
|
||||
for (const region of regions) {
|
||||
const { count } = await generator.generate(
|
||||
`${PUBLIC_PATH}/regions/${region.code.toLowerCase()}.m3u`,
|
||||
{
|
||||
regions: { $elemMatch: region }
|
||||
}
|
||||
)
|
||||
|
||||
await log('regions', {
|
||||
name: region.name,
|
||||
code: region.code,
|
||||
count
|
||||
})
|
||||
}
|
||||
|
||||
const { count: undefinedCount } = await generator.generate(
|
||||
`${PUBLIC_PATH}/regions/undefined.m3u`,
|
||||
{ regions: { $size: 0 } },
|
||||
{ saveEmpty: true }
|
||||
)
|
||||
|
||||
await log('regions', {
|
||||
name: 'Undefined',
|
||||
code: 'UNDEFINED',
|
||||
count: undefinedCount
|
||||
})
|
||||
}
|
||||
|
||||
async function generateIndexNSFW() {
|
||||
logger.info(`Generating index.nsfw.m3u...`)
|
||||
|
||||
await generator.generate(`${PUBLIC_PATH}/index.nsfw.m3u`, {}, { includeNSFW: true })
|
||||
}
|
||||
|
||||
async function generateIndex() {
|
||||
logger.info(`Generating index.m3u...`)
|
||||
|
||||
await generator.generate(`${PUBLIC_PATH}/index.m3u`, {})
|
||||
}
|
||||
|
||||
async function generateIndexCategory() {
|
||||
logger.info(`Generating index.category.m3u...`)
|
||||
|
||||
await generator.generate(
|
||||
`${PUBLIC_PATH}/index.category.m3u`,
|
||||
{},
|
||||
{ sortBy: item => item.group_title }
|
||||
)
|
||||
}
|
||||
|
||||
async function generateIndexCountry() {
|
||||
logger.info(`Generating index.country.m3u...`)
|
||||
|
||||
await generator.generate(
|
||||
`${PUBLIC_PATH}/index.country.m3u`,
|
||||
{},
|
||||
{
|
||||
onLoad: function (items) {
|
||||
let results = items
|
||||
.filter(item => !item.countries.length)
|
||||
.map(item => {
|
||||
const newItem = _.cloneDeep(item)
|
||||
newItem.group_title = ''
|
||||
return newItem
|
||||
})
|
||||
for (const country of _.sortBy(Object.values(countries), ['name'])) {
|
||||
let filtered = items
|
||||
.filter(item => {
|
||||
return item.countries.map(c => c.code).includes(country.code)
|
||||
})
|
||||
.map(item => {
|
||||
const newItem = _.cloneDeep(item)
|
||||
newItem.group_title = country.name
|
||||
return newItem
|
||||
})
|
||||
results = results.concat(filtered)
|
||||
}
|
||||
|
||||
return results
|
||||
},
|
||||
sortBy: item => item.group_title
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
async function generateIndexLanguage() {
|
||||
logger.info(`Generating index.language.m3u...`)
|
||||
|
||||
await generator.generate(
|
||||
`${PUBLIC_PATH}/index.language.m3u`,
|
||||
{},
|
||||
{
|
||||
onLoad: function (items) {
|
||||
let results = items
|
||||
.filter(item => !item.languages.length)
|
||||
.map(item => {
|
||||
const newItem = _.cloneDeep(item)
|
||||
newItem.group_title = ''
|
||||
return newItem
|
||||
})
|
||||
for (const language of languages) {
|
||||
let filtered = items
|
||||
.filter(item => {
|
||||
return item.languages.map(c => c.code).includes(language.code)
|
||||
})
|
||||
.map(item => {
|
||||
const newItem = _.cloneDeep(item)
|
||||
newItem.group_title = language.name
|
||||
return newItem
|
||||
})
|
||||
results = results.concat(filtered)
|
||||
}
|
||||
|
||||
return results
|
||||
},
|
||||
sortBy: item => item.group_title
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
async function generateIndexRegion() {
|
||||
logger.info(`Generating index.region.m3u...`)
|
||||
|
||||
await generator.generate(
|
||||
`${PUBLIC_PATH}/index.region.m3u`,
|
||||
{},
|
||||
{
|
||||
onLoad: function (items) {
|
||||
let results = items
|
||||
.filter(item => !item.regions.length)
|
||||
.map(item => {
|
||||
const newItem = _.cloneDeep(item)
|
||||
newItem.group_title = ''
|
||||
return newItem
|
||||
})
|
||||
for (const region of regions) {
|
||||
let filtered = items
|
||||
.filter(item => {
|
||||
return item.regions.map(c => c.code).includes(region.code)
|
||||
})
|
||||
.map(item => {
|
||||
const newItem = _.cloneDeep(item)
|
||||
newItem.group_title = region.name
|
||||
return newItem
|
||||
})
|
||||
results = results.concat(filtered)
|
||||
}
|
||||
|
||||
return results
|
||||
},
|
||||
sortBy: item => item.group_title
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
async function generateChannelsJson() {
|
||||
logger.info('Generating channels.json...')
|
||||
|
||||
await generator.generate(`${PUBLIC_PATH}/channels.json`, {}, { format: 'json' })
|
||||
}
|
||||
|
||||
// Loads the database once and derives the sorted, de-duplicated category /
// country / language / region lists used by the generators, then pre-creates
// the per-type log files that log() appends to.
async function setUp() {
  logger.info(`Loading database...`)
  const items = await db.find({})
  // flatten -> uniq -> sort: each stream carries arrays of these records.
  categories = _.sortBy(_.uniqBy(_.flatten(items.map(i => i.categories)), 'slug'), ['name'])
  countries = _.sortBy(_.uniqBy(_.flatten(items.map(i => i.countries)), 'code'), ['name'])
  languages = _.sortBy(_.uniqBy(_.flatten(items.map(i => i.languages)), 'code'), ['name'])
  regions = _.sortBy(_.uniqBy(_.flatten(items.map(i => i.regions)), 'code'), ['name'])

  const categoriesLog = `${LOGS_PATH}/generate-playlists/categories.log`
  const countriesLog = `${LOGS_PATH}/generate-playlists/countries.log`
  const languagesLog = `${LOGS_PATH}/generate-playlists/languages.log`
  const regionsLog = `${LOGS_PATH}/generate-playlists/regions.log`

  logger.info(`Creating '${categoriesLog}'...`)
  await file.create(categoriesLog)
  logger.info(`Creating '${countriesLog}'...`)
  await file.create(countriesLog)
  logger.info(`Creating '${languagesLog}'...`)
  await file.create(languagesLog)
  logger.info(`Creating '${regionsLog}'...`)
  await file.create(regionsLog)
}
|
||||
|
||||
// Appends one JSON line describing a generated playlist to the matching
// generate-playlists log (consumed later by the readme generator).
async function log(type, data) {
  await file.append(`${LOGS_PATH}/generate-playlists/${type}.log`, JSON.stringify(data) + '\n')
}
|
|
@ -0,0 +1,221 @@
|
|||
const _ = require('lodash')
|
||||
const statuses = require('../data/statuses')
|
||||
const languages = require('../data/languages')
|
||||
const { db, store, parser, file, logger } = require('../core')
|
||||
|
||||
let epgCodes = []
|
||||
let streams = []
|
||||
let checkResults = {}
|
||||
const origins = {}
|
||||
const items = []
|
||||
|
||||
const LOGS_PATH = process.env.LOGS_PATH || 'scripts/logs'
|
||||
const EPG_CODES_FILEPATH = process.env.EPG_CODES_FILEPATH || 'scripts/data/codes.json'
|
||||
|
||||
// Entry point: enrich the database with the latest check results — load
// everything, index the check logs, detect stream origins, apply the
// updates, then persist the changed streams.
async function main() {
  await setUp()
  await loadDatabase()
  await loadCheckResults()
  await findStreamOrigins()
  await updateStreams()
  await updateDatabase()
}

main()
|
||||
|
||||
// Loads every stream document from the database into the module-level
// `streams` array.
async function loadDatabase() {
  logger.info('Loading database...')

  streams = await db.find({})

  logger.info(`Found ${streams.length} streams`)
}
|
||||
|
||||
// Reads every cluster log produced by the check-streams script and indexes
// the results by stream id into the module-level `checkResults` map.
async function loadCheckResults() {
  logger.info('Loading check results from logs/...')

  const files = await file.list(`${LOGS_PATH}/check-streams/cluster_*.log`)
  for (const filepath of files) {
    const results = await parser.parseLogs(filepath)
    for (const result of results) {
      // Later entries with the same _id overwrite earlier ones.
      checkResults[result._id] = result
    }
  }

  logger.info(`Found ${Object.values(checkResults).length} results`)
}
|
||||
|
||||
// Builds the module-level `origins` map: for every successful check whose
// request chain followed redirects, maps each redirected URL (protocol
// stripped) back to the URL the chain started from, so mirrors of the same
// stream can be collapsed to one canonical URL later.
async function findStreamOrigins() {
  logger.info('Searching for stream origins...')

  for (const { error, requests } of Object.values(checkResults)) {
    if (error || !Array.isArray(requests) || !requests.length) continue

    // The first request in the chain is treated as the origin.
    // NOTE(review): shift() mutates the shared checkResults entry — any
    // later reader sees the chain without its first request.
    let origin = requests.shift()
    origin = new URL(origin.url)
    for (const request of requests) {
      const curr = new URL(request.url)
      // Key without the protocol so http/https variants match.
      const key = curr.href.replace(/(^\w+:|^)/, '')
      // Only same-host redirects are mapped back to the origin; first
      // mapping for a key wins.
      if (!origins[key] && curr.host === origin.host) {
        origins[key] = origin.href
      }
    }
  }

  logger.info(`Found ${_.uniq(Object.values(origins)).length} origins`)
}
|
||||
|
||||
// Applies check results and EPG metadata to every loaded stream, collecting
// the changed ones into the module-level `items` list for updateDatabase().
async function updateStreams() {
  logger.info('Updating streams...')

  let updated = 0
  for (const item of streams) {
    const stream = store.create(item)
    const result = checkResults[item._id]

    if (result) {
      const { error, streams, requests } = result
      const status = parseStatus(error)
      const resolution = parseResolution(streams)
      const origin = findOrigin(requests)

      if (status) {
        stream.set('status', { status })
        // is_broken is derived from the (possibly updated) status.
        stream.set('is_broken', { status: stream.get('status') })
      }

      if (resolution) {
        stream.set('resolution', { resolution })
      }

      // Replace the stream URL with its canonical origin when one is known.
      if (origin) {
        stream.set('url', { url: origin })
      }
    }

    // Fall back to the EPG codes data for a missing logo.
    if (!stream.has('logo')) {
      const logo = findLogo(stream.get('id'))
      stream.set('logo', { logo })
    }

    // Same for missing guide URLs.
    if (!stream.has('guides')) {
      const guides = findGuides(stream.get('id'))
      stream.set('guides', { guides })
    }

    // Derive countries from the source country when none were set.
    if (!stream.has('countries') && stream.get('src_country')) {
      const countries = [stream.get('src_country')]
      stream.set('countries', { countries })
    }

    if (!stream.has('languages')) {
      const languages = findLanguages(stream.get('countries'), stream.get('src_country'))
      stream.set('languages', { languages })
    }

    // Only persist streams that actually changed.
    if (stream.changed) {
      stream.set('updated', true)
      items.push(stream.data())
      updated++
    }
  }

  logger.info(`Updated ${updated} items`)
}
|
||||
|
||||
// Writes every changed stream (collected by updateStreams) back to the
// database, then compacts the datastore file.
async function updateDatabase() {
  logger.info('Updating database...')

  for (const item of items) {
    await db.update({ _id: item._id }, item)
  }
  // NOTE(review): compact() is not awaited — confirm the datastore flushes
  // before the process exits.
  db.compact()

  logger.info('Done')
}
|
||||
|
||||
/**
 * Loads the EPG codes file into the module-level `epgCodes` list. A missing
 * or malformed file is logged and otherwise ignored, since logos/guides are
 * optional enrichments.
 */
async function setUp() {
  try {
    const raw = await file.read(EPG_CODES_FILEPATH)
    epgCodes = JSON.parse(raw)
  } catch (err) {
    // Non-fatal: lookups against an empty epgCodes simply find nothing.
    logger.error(err.message)
  }
}
|
||||
|
||||
/**
 * Resolves the language list for a stream: prefers the languages spoken in
 * its `countries`, otherwise falls back to the source country's language.
 * Codes that don't exist in the languages data set are dropped.
 */
function findLanguages(countries, src_country) {
  if (Array.isArray(countries)) {
    const uniqueCodes = _.uniq(countries.map(country => country.lang))

    return uniqueCodes
      .map(code => languages.find(l => l.code === code))
      .filter(Boolean)
  }

  if (src_country) {
    const match = languages.find(l => l.code === src_country.lang)

    return match ? [match] : []
  }

  return []
}
|
||||
|
||||
/**
 * Looks up a known origin URL for any of the request URLs recorded during a
 * stream check. URLs are compared with their protocol stripped, mirroring
 * how the `origins` map is keyed.
 */
function findOrigin(requests) {
  if (!origins || !Array.isArray(requests)) return null

  for (const request of requests) {
    const key = request.url.replace(/(^\w+:|^)/, '')
    if (origins[key]) return origins[key]
  }

  return null
}
|
||||
|
||||
/**
 * Picks the highest video resolution among ffprobe-style stream descriptors.
 *
 * @param {Array|undefined} streams - Stream descriptors; entries with
 *   `codec_type === 'video'` are expected to carry `width`/`height`.
 * @returns {{width: number, height: number}|null} The tallest video
 *   resolution found, or null when there is none.
 */
function parseResolution(streams) {
  // Guard: check results from a failed probe may carry no stream list at
  // all — the original threw a TypeError here in that case.
  if (!Array.isArray(streams)) return null

  const best = streams
    .filter(s => s.codec_type === 'video')
    .reduce(
      (acc, curr) => (curr.height > acc.height ? { width: curr.width, height: curr.height } : acc),
      { width: 0, height: 0 }
    )

  return best.width > 0 && best.height > 0 ? best : null
}
|
||||
|
||||
/**
 * Maps a checker error message onto one of the predefined status records.
 * Returns null when there was no error (i.e. the stream is online).
 */
function parseStatus(error) {
  if (!error) return null

  if (error.includes('timed out')) return statuses['timeout']
  if (error.includes('403')) return statuses['geo_blocked']

  return statuses['offline']
}
|
||||
|
||||
/**
 * Finds a logo URL for the given channel id in the loaded EPG codes list.
 * Returns null when the id is unknown or carries no logo.
 */
function findLogo(id) {
  const match = epgCodes.find(code => code.tvg_id === id)

  return match && match.logo ? match.logo : null
}
|
||||
|
||||
/**
 * Finds the EPG guide URLs for the given channel id. Returns an empty array
 * when the id is unknown or no guides are listed.
 */
function findGuides(id) {
  const match = epgCodes.find(code => code.tvg_id === id)

  return match && Array.isArray(match.guides) ? match.guides : []
}
|
|
@ -0,0 +1,17 @@
|
|||
const _ = require('lodash')
|
||||
const { generator, db, logger } = require('../core')
|
||||
|
||||
// Rewrites every source playlist from the database: streams are sorted by
// name, status level and resolution, de-duplicated by URL, then written
// back grouped by their original playlist file.
async function main() {
  let items = await db
    .find({})
    .sort({ name: 1, 'status.level': 1, 'resolution.height': -1, url: 1 })
  // Keep only the best-ranked entry per URL (the sort above decides which).
  items = _.uniqBy(items, 'url')
  const files = _.groupBy(items, 'filepath')

  for (const filepath in files) {
    const items = files[filepath]
    await generator.saveAsM3U(filepath, items, { includeGuides: false })
  }
}

main()
|
|
@ -0,0 +1,142 @@
|
|||
const { file, markdown, parser, logger } = require('../core')
|
||||
const { program } = require('commander')
|
||||
|
||||
let categories = []
|
||||
let countries = []
|
||||
let languages = []
|
||||
let regions = []
|
||||
|
||||
const LOGS_PATH = process.env.LOGS_PATH || 'scripts/logs'
|
||||
|
||||
const options = program
|
||||
.option('-c, --config <config>', 'Set path to config file', '.readme/config.json')
|
||||
.parse(process.argv)
|
||||
.opts()
|
||||
|
||||
async function main() {
|
||||
await setUp()
|
||||
|
||||
await generateCategoryTable()
|
||||
await generateLanguageTable()
|
||||
await generateRegionTable()
|
||||
await generateCountryTable()
|
||||
|
||||
await updateReadme()
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
async function generateCategoryTable() {
|
||||
logger.info('Generating category table...')
|
||||
|
||||
const rows = []
|
||||
for (const category of categories) {
|
||||
rows.push({
|
||||
category: category.name,
|
||||
channels: category.count,
|
||||
playlist: `<code>https://iptv-org.github.io/iptv/categories/${category.slug}.m3u</code>`
|
||||
})
|
||||
}
|
||||
|
||||
const table = markdown.createTable(rows, [
|
||||
{ name: 'Category', align: 'left' },
|
||||
{ name: 'Channels', align: 'right' },
|
||||
{ name: 'Playlist', align: 'left' }
|
||||
])
|
||||
|
||||
await file.create('./.readme/_categories.md', table)
|
||||
}
|
||||
|
||||
async function generateCountryTable() {
|
||||
logger.info('Generating country table...')
|
||||
|
||||
const rows = []
|
||||
for (const country of countries) {
|
||||
const flag = getCountryFlag(country.code)
|
||||
const prefix = flag ? `${flag} ` : ''
|
||||
|
||||
rows.push({
|
||||
country: prefix + country.name,
|
||||
channels: country.count,
|
||||
playlist: `<code>https://iptv-org.github.io/iptv/countries/${country.code.toLowerCase()}.m3u</code>`
|
||||
})
|
||||
}
|
||||
|
||||
const table = markdown.createTable(rows, [
|
||||
{ name: 'Country', align: 'left' },
|
||||
{ name: 'Channels', align: 'right' },
|
||||
{ name: 'Playlist', align: 'left' }
|
||||
])
|
||||
|
||||
await file.create('./.readme/_countries.md', table)
|
||||
}
|
||||
|
||||
async function generateRegionTable() {
|
||||
logger.info('Generating region table...')
|
||||
|
||||
const rows = []
|
||||
for (const region of regions) {
|
||||
rows.push({
|
||||
region: region.name,
|
||||
channels: region.count,
|
||||
playlist: `<code>https://iptv-org.github.io/iptv/regions/${region.code.toLowerCase()}.m3u</code>`
|
||||
})
|
||||
}
|
||||
|
||||
const table = markdown.createTable(rows, [
|
||||
{ name: 'Region', align: 'left' },
|
||||
{ name: 'Channels', align: 'right' },
|
||||
{ name: 'Playlist', align: 'left' }
|
||||
])
|
||||
|
||||
await file.create('./.readme/_regions.md', table)
|
||||
}
|
||||
|
||||
async function generateLanguageTable() {
|
||||
logger.info('Generating language table...')
|
||||
|
||||
const rows = []
|
||||
for (const language of languages) {
|
||||
rows.push({
|
||||
language: language.name,
|
||||
channels: language.count,
|
||||
playlist: `<code>https://iptv-org.github.io/iptv/languages/${language.code}.m3u</code>`
|
||||
})
|
||||
}
|
||||
|
||||
const table = markdown.createTable(rows, [
|
||||
{ name: 'Language', align: 'left' },
|
||||
{ name: 'Channels', align: 'right' },
|
||||
{ name: 'Playlist', align: 'left' }
|
||||
])
|
||||
|
||||
await file.create('./.readme/_languages.md', table)
|
||||
}
|
||||
|
||||
// Compiles README.md from the generated table partials according to the
// config file passed via --config.
async function updateReadme() {
  logger.info('Updating README.md...')

  // Dynamic require: the config path comes from a CLI option, resolved to
  // an absolute path first.
  const config = require(file.resolve(options.config))
  await file.createDir(file.dirname(config.build))
  await markdown.compile(options.config)
}
|
||||
|
||||
async function setUp() {
|
||||
categories = await parser.parseLogs(`${LOGS_PATH}/generate-playlists/categories.log`)
|
||||
countries = await parser.parseLogs(`${LOGS_PATH}/generate-playlists/countries.log`)
|
||||
languages = await parser.parseLogs(`${LOGS_PATH}/generate-playlists/languages.log`)
|
||||
regions = await parser.parseLogs(`${LOGS_PATH}/generate-playlists/regions.log`)
|
||||
}
|
||||
|
||||
/**
 * Converts an ISO 3166-1 alpha-2 country code into its emoji flag, plus the
 * repository's special codes: 'UK' maps to the GB flag, 'INT' to a globe,
 * and 'UNDEFINED' to an empty string.
 *
 * @param {string} code - Country code (now case-insensitive).
 * @returns {string} Emoji flag, or '' for 'UNDEFINED'.
 */
function getCountryFlag(code) {
  // Regional-indicator symbols are only defined relative to uppercase A-Z,
  // so normalize first — the original silently produced invalid code points
  // for lowercase input.
  const normalized = String(code).toUpperCase()
  switch (normalized) {
    case 'UK':
      return '🇬🇧'
    case 'INT':
      return '🌍'
    case 'UNDEFINED':
      return ''
    default:
      // Shift each letter into the Unicode regional-indicator range
      // (U+1F1E6 'A' .. U+1F1FF 'Z'); 127397 = 0x1F1E6 - 'A'.
      return normalized.replace(/./g, char => String.fromCodePoint(char.charCodeAt(0) + 127397))
  }
}
|
|
@ -0,0 +1,55 @@
|
|||
const blocklist = require('../data/blocklist')
|
||||
const parser = require('iptv-playlist-parser')
|
||||
const { file, logger } = require('../core')
|
||||
const { program } = require('commander')
|
||||
|
||||
const options = program
|
||||
.option('--input-dir <input-dir>', 'Set path to input directory', 'channels')
|
||||
.parse(process.argv)
|
||||
.opts()
|
||||
|
||||
// Scans every playlist in the input directory for channels that appear on
// the blocklist and exits with a non-zero status when any are found.
async function main() {
  const files = await file.list(`${options.inputDir}/**/*.m3u`)
  const errors = []
  for (const filepath of files) {
    const content = await file.read(filepath)
    const playlist = parser.parse(content)
    const basename = file.basename(filepath)
    // Derive the country code from the filename, e.g. 'us.m3u' or
    // 'us_sports.m3u' -> 'us'; null when the name doesn't match the pattern.
    const [_, country] = basename.match(/([a-z]{2})(|_.*)\.m3u/i) || [null, null]

    // Keep only the items that match a blocklist entry, attaching the
    // matched entry as `details` for the error message below.
    const items = playlist.items
      .map(item => {
        const details = check(item, country)

        return details ? { ...item, details } : null
      })
      .filter(i => i)

    items.forEach(item => {
      errors.push(
        `${filepath}:${item.line} '${item.details.name}' is on the blocklist due to claims of copyright holders (${item.details.reference})`
      )
    })
  }

  errors.forEach(error => {
    logger.error(error)
  })

  if (errors.length) {
    // Blank line keeps the summary readable in CI logs.
    logger.info('')
    process.exit(1)
  }
}
|
||||
|
||||
// Returns the first blocklist entry whose regex matches the channel name
// AND whose country matches the playlist's country code; undefined when
// the channel is not blocked.
function check(channel, country) {
  return blocklist.find(item => {
    const regexp = new RegExp(item.regex, 'i')
    const hasSameName = regexp.test(channel.name)
    // NOTE(review): assumes playlist filenames use lowercase country codes
    // (the filename regex in main() is case-insensitive, so an uppercase
    // filename would never match here) — TODO confirm.
    const fromSameCountry = country === item.country.toLowerCase()

    return hasSameName && fromSameCountry
  })
}
|
||||
|
||||
main()
|
Loading…
Reference in New Issue