Create subfolders for commands

Aleksandr Statciuk 2022-02-11 21:07:16 +03:00
parent 5ec8619268
commit c03095f762
24 changed files with 60 additions and 493 deletions

View File

@@ -8,7 +8,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Download channels from API
- uses: actions/setup-node@v2
if: ${{ !env.ACT }}
with:
node-version: '14'
cache: 'npm'
- name: Download data from API
run: |
mkdir -p scripts/data
curl -L -o scripts/data/channels.json https://iptv-org.github.io/api/channels.json
@@ -17,23 +22,18 @@ jobs:
curl -L -o scripts/data/languages.json https://iptv-org.github.io/api/languages.json
curl -L -o scripts/data/regions.json https://iptv-org.github.io/api/regions.json
curl -L -o scripts/data/guides.json https://iptv-org.github.io/api/guides.json
- uses: actions/setup-node@v2
if: ${{ !env.ACT }}
- uses: actions/upload-artifact@v2
with:
node-version: '14'
cache: 'npm'
name: data
path: scripts/data
- run: npm install
- run: node scripts/commands/create-database.js
- run: node scripts/commands/create-matrix.js
- run: node scripts/commands/database/create.js
- run: node scripts/commands/database/matrix.js
id: create-matrix
- uses: actions/upload-artifact@v2
with:
name: database
path: scripts/database
- uses: actions/upload-artifact@v2
with:
name: data
path: scripts/data
outputs:
matrix: ${{ steps.create-matrix.outputs.matrix }}
load:
@@ -57,7 +57,7 @@ jobs:
with:
node-version: '14'
- run: npm install
- run: node scripts/commands/load-cluster.js --cluster-id=${{ matrix.cluster_id }}
- run: node scripts/commands/stream/load.js --cluster-id=${{ matrix.cluster_id }}
- uses: actions/upload-artifact@v2
with:
name: logs
@@ -73,6 +73,10 @@ jobs:
- run: git config user.email '84861620+iptv-bot[bot]@users.noreply.github.com'
- run: git checkout -b ${{ steps.create-branch-name.outputs.branch_name }}
- run: curl -L -o scripts/data/codes.json https://iptv-org.github.io/epg/codes.json
- uses: actions/setup-node@v2
if: ${{ !env.ACT }}
with:
node-version: '14'
- uses: actions/download-artifact@v2
with:
name: database
@@ -85,25 +89,21 @@ jobs:
with:
name: logs
path: scripts/logs
- uses: actions/setup-node@v2
if: ${{ !env.ACT }}
with:
node-version: '14'
- run: npm install
- run: node scripts/commands/save-results.js
- run: node scripts/commands/database/update.js
- uses: actions/upload-artifact@v2
with:
name: database
path: scripts/database
- run: node scripts/commands/update-playlists.js
- run: node scripts/commands/playlist/update.js
- run: git add channels/*
- run: git commit -m "[Bot] Update playlists"
- run: node scripts/commands/generate-playlists.js
- run: node scripts/commands/playlist/generate.js
- uses: actions/upload-artifact@v2
with:
name: logs
path: scripts/logs
- run: node scripts/commands/update-readme.js
- run: node scripts/commands/readme/update.js
- run: git add README.md
- run: git commit -m "[Bot] Update README.md"
- run: git push -u origin ${{ steps.create-branch-name.outputs.branch_name }}

View File

@@ -3,7 +3,12 @@
"scripts": {
"validate": "node scripts/commands/validate.js",
"lint": "npx m3u-linter -c m3u-linter.json",
"test": "jest --runInBand"
"test": "jest --runInBand",
"test:commands": "jest --runInBand -- commands",
"test:commands:database": "jest --runInBand -- database",
"test:commands:playlist": "jest --runInBand -- playlist",
"test:commands:readme": "jest --runInBand -- readme",
"test:commands:stream": "jest --runInBand -- stream"
},
"jest": {
"testRegex": "tests/(.*?/)?.*test.js$",

scripts/.gitignore
View File

@@ -1,2 +1 @@
logs/
channels.db
logs/

View File

@@ -1 +0,0 @@
export.js

View File

@@ -1,25 +0,0 @@
const { db, logger } = require('../core')
async function main() {
logger.info(`loading streams...`)
await db.streams.load()
let streams = await db.streams.find({})
logger.info(`removing broken links...`)
let removed = 0
const buffer = {}
for (const stream of streams) {
const duplicate = buffer[stream.channel_id]
if (duplicate && ['offline', 'timeout'].includes(stream.status.code)) {
await db.streams.remove({ _id: stream._id })
removed++
} else {
buffer[stream.channel_id] = stream
}
}
db.streams.compact()
logger.info(`removed ${removed} links`)
}
main()
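The removal rule above is easy to misread: a stream is dropped only when an earlier stream with the same channel_id has already been kept in the buffer and the current one is marked offline or timed out. A minimal standalone sketch of that rule, using made-up stream records:

// sketch of the dedupe rule above, with hypothetical data
const buffer = {}
const removed = []
const streams = [
  { _id: 1, channel_id: 'abc.us', status: { code: 'online' } },
  { _id: 2, channel_id: 'abc.us', status: { code: 'timeout' } },
  { _id: 3, channel_id: 'xyz.us', status: { code: 'offline' } }
]
for (const stream of streams) {
  const duplicate = buffer[stream.channel_id]
  if (duplicate && ['offline', 'timeout'].includes(stream.status.code)) {
    removed.push(stream._id) // the real script calls db.streams.remove() here
  } else {
    buffer[stream.channel_id] = stream
  }
}
console.log(removed) // [ 2 ]  (stream 3 stays because it has no kept duplicate)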

View File

@@ -1,82 +0,0 @@
const { db, file, parser, store, logger, id } = require('../core')
const { program } = require('commander')
const _ = require('lodash')
const options = program
.option(
'--max-clusters <max-clusters>',
'Set maximum number of clusters',
parser.parseNumber,
256
)
.option('--input-dir <input-dir>', 'Set path to input directory', 'channels')
.parse(process.argv)
.opts()
async function main() {
logger.info('starting...')
logger.info(`number of clusters: ${options.maxClusters}`)
await saveToDatabase(await findStreams())
logger.info('done')
}
main()
async function findStreams() {
logger.info(`looking for streams...`)
await db.streams.load()
const files = await file.list(`${options.inputDir}/**/*.m3u`)
const streams = []
for (const filepath of files) {
const items = await parser.parsePlaylist(filepath)
for (const item of items) {
item.filepath = filepath
streams.push(item)
}
}
logger.info(`found ${streams.length} streams`)
return streams
}
async function saveToDatabase(streams = []) {
logger.info('saving to the database...')
await db.streams.reset()
const chunks = split(_.shuffle(streams), options.maxClusters)
for (const [i, chunk] of chunks.entries()) {
for (const item of chunk) {
const stream = store.create()
stream.set('channel_id', { channel_id: item.tvg.id })
stream.set('channel_name', { title: item.name })
stream.set('filepath', { filepath: item.filepath })
stream.set('resolution', { title: item.name })
stream.set('status', { title: item.name })
stream.set('url', { url: item.url })
stream.set('http', { http: item.http })
stream.set('is_broken', { status: stream.get('status') })
stream.set('updated', { updated: false })
stream.set('cluster_id', { cluster_id: i + 1 })
if (!stream.get('channel_id')) {
const channel_id = id.generate(item.name, item.filepath)
stream.set('channel_id', { channel_id })
stream.set('updated', { updated: true })
}
await db.streams.insert(stream.data())
}
}
}
function split(arr, n) {
let result = []
for (let i = n; i > 0; i--) {
result.push(arr.splice(0, Math.ceil(arr.length / i)))
}
return result
}
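For reference, the split helper at the bottom distributes the shuffled streams into at most max-clusters roughly equal chunks, each of which becomes one cluster_id. A quick standalone check with hypothetical values (the function body is copied from above):

const assert = require('assert')

// same helper as above
function split(arr, n) {
  let result = []
  for (let i = n; i > 0; i--) {
    result.push(arr.splice(0, Math.ceil(arr.length / i)))
  }
  return result
}

// 5 streams, 2 clusters: the first chunk gets 3 items, the second gets 2
assert.deepStrictEqual(split(['a', 'b', 'c', 'd', 'e'], 2), [['a', 'b', 'c'], ['d', 'e']])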

View File

@@ -1,16 +0,0 @@
const { logger, db } = require('../core')
async function main() {
await db.streams.load()
const docs = await db.streams.find({}).sort({ cluster_id: 1 })
const cluster_id = docs.reduce((acc, curr) => {
if (!acc.includes(curr.cluster_id)) acc.push(curr.cluster_id)
return acc
}, [])
const matrix = { cluster_id }
const output = `::set-output name=matrix::${JSON.stringify(matrix)}`
logger.info(output)
}
main()
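The reduce above collects the distinct cluster_id values, and the script prints them in the ::set-output format that GitHub Actions exposes as the job's matrix (see the outputs.matrix wiring in the workflow diff). A minimal sketch with hypothetical documents:

// sketch of the matrix output, using made-up documents
const docs = [{ cluster_id: 1 }, { cluster_id: 1 }, { cluster_id: 3 }]
const cluster_id = docs.reduce((acc, curr) => {
  if (!acc.includes(curr.cluster_id)) acc.push(curr.cluster_id)
  return acc
}, [])
console.log(`::set-output name=matrix::${JSON.stringify({ cluster_id })}`)
// ::set-output name=matrix::{"cluster_id":[1,3]}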

View File

@@ -1,4 +1,4 @@
const { db, generator, api, logger, file } = require('../core')
const { db, generator, api, logger, file } = require('../../core')
const _ = require('lodash')
async function main() {

View File

@@ -1,7 +1,7 @@
const _ = require('lodash')
const { create: createPlaylist } = require('../../core/playlist')
const { db, logger, file } = require('../../core')
const { orderBy } = require('natural-orderby')
const { create: createPlaylist } = require('../core/playlist')
const { db, logger, file } = require('../core')
const _ = require('lodash')
async function main() {
await db.streams.load()

View File

@@ -1,4 +1,4 @@
const { file, logger, api, parser, blocklist } = require('../core')
const { file, logger, api, parser, blocklist } = require('../../core')
const { program } = require('commander')
const chalk = require('chalk')

View File

@@ -1,5 +1,5 @@
const { file, markdown, parser, logger, api } = require('../core')
const { create: createTable } = require('../core/table')
const { file, markdown, parser, logger, api } = require('../../core')
const { create: createTable } = require('../../core/table')
const { program } = require('commander')
const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs/generators'

View File

@@ -1,176 +0,0 @@
const _ = require('lodash')
const statuses = require('../data/statuses')
const { db, store, parser, file, logger } = require('../core')
const items = []
const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs/load-cluster'
async function main() {
let streams = await loadStreams()
const results = await loadResults()
const origins = await findOrigins(results)
streams = await updateStreams(streams, results, origins)
await updateDatabase(streams)
}
main()
async function loadStreams() {
logger.info('loading streams...')
await db.streams.load()
const streams = await db.streams.find({})
logger.info(`found ${streams.length} streams`)
return streams
}
async function loadResults() {
logger.info('loading results from logs...')
const results = {}
const files = await file.list(`${LOGS_DIR}/cluster_*.log`)
for (const filepath of files) {
const parsed = await parser.parseLogs(filepath)
for (const item of parsed) {
results[item._id] = item
}
}
logger.info(`found ${Object.values(results).length} results`)
return results
}
async function findOrigins(results = {}) {
logger.info('searching for stream origins...')
const origins = {}
for (const { error, requests } of Object.values(results)) {
if (error || !Array.isArray(requests) || !requests.length) continue
let origin = requests.shift()
origin = new URL(origin.url)
for (const request of requests) {
const curr = new URL(request.url)
const key = curr.href.replace(/(^\w+:|^)/, '')
if (!origins[key] && curr.host === origin.host) {
origins[key] = origin.href
}
}
}
logger.info(`found ${_.uniq(Object.values(origins)).length} origins`)
return origins
}
async function updateStreams(items = [], results = {}, origins = {}) {
logger.info('updating streams...')
let updated = 0
const output = []
for (const item of items) {
const stream = store.create(item)
const result = results[item._id]
if (result) {
const { error, streams, requests } = result
const resolution = parseResolution(streams)
const origin = findOrigin(requests, origins)
let status = parseStatus(error)
if (status) {
const prevStatus = item.status
if (prevStatus.code === 'not_247')
// not_247 -> * = not_247
status = item.status
else if (prevStatus.code === 'geo_blocked')
// geo_blocked -> * = geo_blocked
status = item.status
else if (status.code === 'geo_blocked')
// * -> geo_blocked = *
status = item.status
else if (prevStatus.code === 'offline' && status.code === 'online')
// offline -> online = not_247
status = statuses['not_247']
stream.set('status', { status })
stream.set('is_broken', { status: stream.get('status') })
}
if (resolution) {
stream.set('resolution', { resolution })
}
if (origin) {
stream.set('url', { url: origin })
}
}
if (stream.changed) {
stream.set('updated', true)
output.push(stream.data())
updated++
}
}
logger.info(`updated ${updated} streams`)
return output
}
async function updateDatabase(streams = []) {
logger.info('updating database...')
for (const stream of streams) {
await db.streams.update({ _id: stream._id }, stream)
}
db.streams.compact()
logger.info('done')
}
function findOrigin(requests = [], origins = {}) {
if (origins && Array.isArray(requests)) {
requests = requests.map(r => r.url.replace(/(^\w+:|^)/, ''))
for (const url of requests) {
if (origins[url]) {
return origins[url]
}
}
}
return null
}
function parseResolution(streams) {
const resolution = streams
.filter(s => s.codec_type === 'video')
.reduce(
(acc, curr) => {
if (curr.height > acc.height) return { width: curr.width, height: curr.height }
return acc
},
{ width: 0, height: 0 }
)
if (resolution.width > 0 && resolution.height > 0) return resolution
return null
}
function parseStatus(error) {
if (error) {
if (error.includes('timed out')) {
return statuses['timeout']
} else if (error.includes('403')) {
return statuses['geo_blocked']
}
return statuses['offline']
}
return statuses['online']
}
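The inline comments in updateStreams compress the status transition rules quite a bit; restated as a standalone function they read as follows (a sketch only, statuses is the table required from ../data/statuses at the top):

// same transition rules as in updateStreams above, restated for readability
function nextStatus(prevStatus, newStatus, statuses) {
  if (prevStatus.code === 'not_247') return prevStatus // not_247 -> * = not_247
  if (prevStatus.code === 'geo_blocked') return prevStatus // geo_blocked -> * = geo_blocked
  if (newStatus.code === 'geo_blocked') return prevStatus // * -> geo_blocked = *
  if (prevStatus.code === 'offline' && newStatus.code === 'online') {
    return statuses['not_247'] // offline -> online = not_247
  }
  return newStatus
}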

View File

@@ -1,5 +1,5 @@
const { db, logger, timer, checker, store, file, parser } = require('../../core')
const { program } = require('commander')
const { db, logger, timer, checker, store, file, parser } = require('../core')
const options = program
.requiredOption('-c, --cluster-id <cluster-id>', 'The ID of cluster to load', parser.parseNumber)
@@ -15,7 +15,7 @@ const config = {
debug: options.debug
}
const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs/load-cluster'
const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs/stream/load'
async function main() {
logger.info('starting...')

View File

@@ -1,37 +0,0 @@
const fs = require('fs-extra')
const path = require('path')
const { execSync } = require('child_process')
beforeEach(() => {
fs.emptyDirSync('tests/__data__/output')
fs.copyFileSync(
'tests/__data__/input/database/cleanup-database.streams.db',
'tests/__data__/output/streams.db'
)
const stdout = execSync(
'DB_DIR=tests/__data__/output node scripts/commands/cleanup-database.js',
{
encoding: 'utf8'
}
)
})
it('can remove broken links from database', () => {
expect(content('tests/__data__/output/streams.db')).toEqual(
content('tests/__data__/expected/database/cleanup-database.streams.db')
)
})
function content(filepath) {
const data = fs.readFileSync(path.resolve(filepath), {
encoding: 'utf8'
})
return data
.split('\n')
.filter(l => l)
.map(l => {
return JSON.parse(l)
})
}

View File

@@ -1,47 +0,0 @@
const fs = require('fs-extra')
const path = require('path')
const { execSync } = require('child_process')
beforeEach(() => {
fs.emptyDirSync('tests/__data__/output')
const stdout = execSync(
'DB_DIR=tests/__data__/output/database node scripts/commands/create-database.js --input-dir=tests/__data__/input/channels --max-clusters=1',
{ encoding: 'utf8' }
)
})
it('can create database', () => {
let output = content('tests/__data__/output/database/streams.db')
let expected = content('tests/__data__/expected/database/streams.db')
output = output.map(i => {
i._id = null
return i
})
expected = expected.map(i => {
i._id = null
return i
})
expect(output).toEqual(
expect.arrayContaining([
expect.objectContaining(expected[0]),
expect.objectContaining(expected[1]),
expect.objectContaining(expected[2])
])
)
})
function content(filepath) {
const data = fs.readFileSync(path.resolve(filepath), {
encoding: 'utf8'
})
return data
.split('\n')
.filter(l => l)
.map(l => {
return JSON.parse(l)
})
}

View File

@@ -1,16 +0,0 @@
const fs = require('fs-extra')
const path = require('path')
const { execSync } = require('child_process')
beforeEach(() => {
fs.emptyDirSync('tests/__data__/output')
fs.copyFileSync('tests/__data__/input/database/streams.db', 'tests/__data__/output/streams.db')
})
it('can create valid matrix', () => {
const result = execSync('DB_DIR=tests/__data__/output node scripts/commands/create-matrix.js', {
encoding: 'utf8'
})
expect(result).toBe('::set-output name=matrix::{"cluster_id":[1,3]}\n')
})

View File

@@ -1,37 +1,35 @@
const { execSync } = require('child_process')
const fs = require('fs-extra')
const path = require('path')
const glob = require('glob')
const { execSync } = require('child_process')
beforeEach(() => {
fs.emptyDirSync('tests/__data__/output')
fs.copyFileSync(
'tests/__data__/input/database/generate-playlists.streams.db',
'tests/__data__/input/database/playlist-generate.streams.db',
'tests/__data__/output/streams.db'
)
const stdout = execSync(
'DB_DIR=tests/__data__/output DATA_DIR=tests/__data__/input/data PUBLIC_DIR=tests/__data__/output/.gh-pages LOGS_DIR=tests/__data__/output/logs/generators node --trace-warnings scripts/commands/generate-playlists.js',
'DB_DIR=tests/__data__/output DATA_DIR=tests/__data__/input/data PUBLIC_DIR=tests/__data__/output/.gh-pages LOGS_DIR=tests/__data__/output/logs/generators node --trace-warnings scripts/commands/playlist/generate.js',
{ encoding: 'utf8' }
)
})
it('can generate playlists', () => {
const files = glob
it('can generate playlists and logs', () => {
const playlists = glob
.sync('tests/__data__/expected/.gh-pages/**/*.m3u')
.map(f => f.replace('tests/__data__/expected/', ''))
files.forEach(filepath => {
playlists.forEach(filepath => {
expect(content(`output/${filepath}`), filepath).toBe(content(`expected/${filepath}`))
})
})
it('can generate logs', () => {
const files = glob
const logs = glob
.sync('tests/__data__/expected/logs/generators/*.log')
.map(f => f.replace('tests/__data__/expected/', ''))
files.forEach(filepath => {
logs.forEach(filepath => {
expect(content(`output/${filepath}`), filepath).toBe(content(`expected/${filepath}`))
})
})

View File

@@ -7,12 +7,9 @@ beforeEach(() => {
fs.emptyDirSync('tests/__data__/output')
fs.copyFileSync('tests/__data__/input/database/streams.db', 'tests/__data__/output/streams.db')
const stdout = execSync(
'DB_DIR=tests/__data__/output node scripts/commands/update-playlists.js',
{
encoding: 'utf8'
}
)
const stdout = execSync('DB_DIR=tests/__data__/output node scripts/commands/playlist/update.js', {
encoding: 'utf8'
})
})
it('can update playlists', () => {

View File

@@ -2,9 +2,12 @@ const { execSync } = require('child_process')
it('show error if channel name in the blocklist', () => {
try {
execSync('node scripts/commands/validate.js tests/__data__/input/channels/us_blocked.m3u', {
encoding: 'utf8'
})
execSync(
'DATA_DIR=tests/__data__/input/data node scripts/commands/playlist/validate.js tests/__data__/input/channels/us_blocked.m3u',
{
encoding: 'utf8'
}
)
} catch (err) {
expect(err.status).toBe(1)
expect(err.stdout).toBe(
@@ -15,7 +18,7 @@ it('show error if channel name in the blocklist', () => {
it('show warning if channel has wrong id', () => {
const stdout = execSync(
'node scripts/commands/validate.js tests/__data__/input/channels/wrong_id.m3u',
'DATA_DIR=tests/__data__/input/data node scripts/commands/playlist/validate.js tests/__data__/input/channels/wrong_id.m3u',
{
encoding: 'utf8'
}

View File

@@ -1,12 +1,12 @@
const { execSync } = require('child_process')
const fs = require('fs-extra')
const path = require('path')
const { execSync } = require('child_process')
beforeEach(() => {
fs.emptyDirSync('tests/__data__/output')
const stdout = execSync(
'DATA_DIR=tests/__data__/input/data LOGS_DIR=tests/__data__/input/logs/generators node scripts/commands/update-readme.js --config=tests/__data__/input/_readme.json',
'DATA_DIR=tests/__data__/input/data LOGS_DIR=tests/__data__/input/logs/generators node scripts/commands/readme/update.js --config=tests/__data__/input/_readme.json',
{ encoding: 'utf8' }
)
})

View File

@@ -1,35 +0,0 @@
const fs = require('fs-extra')
const path = require('path')
const { execSync } = require('child_process')
beforeEach(() => {
fs.emptyDirSync('tests/__data__/output')
fs.copyFileSync(
'tests/__data__/input/database/save-results.streams.db',
'tests/__data__/output/streams.db'
)
const stdout = execSync(
'DB_DIR=tests/__data__/output LOGS_DIR=tests/__data__/input/logs/load-cluster node scripts/commands/save-results.js',
{ encoding: 'utf8' }
)
})
it('can save results', () => {
expect(content('tests/__data__/output/streams.db')).toEqual(
content('tests/__data__/expected/database/save-results.streams.db')
)
})
function content(filepath) {
const data = fs.readFileSync(path.resolve(filepath), {
encoding: 'utf8'
})
return data
.split('\n')
.filter(l => l)
.map(l => {
return JSON.parse(l)
})
}

View File

@@ -1,20 +1,20 @@
const { execSync } = require('child_process')
const fs = require('fs-extra')
const path = require('path')
const { execSync } = require('child_process')
beforeEach(() => {
fs.emptyDirSync('tests/__data__/output')
fs.copyFileSync('tests/__data__/input/database/streams.db', 'tests/__data__/output/streams.db')
const stdout = execSync(
'DB_DIR=tests/__data__/output LOGS_DIR=tests/__data__/output/logs/load-cluster node scripts/commands/load-cluster.js --cluster-id=1 --timeout=1',
'DB_DIR=tests/__data__/output LOGS_DIR=tests/__data__/output/logs/stream/load node scripts/commands/stream/load.js --cluster-id=1 --timeout=1',
{ encoding: 'utf8' }
)
})
it('return results', () => {
expect(content('tests/__data__/output/logs/load-cluster/cluster_1.log')).toEqual(
content('tests/__data__/expected/logs/load-cluster/cluster_1.log')
expect(content('tests/__data__/output/logs/stream/load/cluster_1.log')).toEqual(
content('tests/__data__/expected/logs/stream/load/cluster_1.log')
)
})