diff --git a/.github/workflows/auto-update.yml b/.github/workflows/auto-update.yml
index 89dda0bb4a..0dff5b7ea7 100644
--- a/.github/workflows/auto-update.yml
+++ b/.github/workflows/auto-update.yml
@@ -8,7 +8,12 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
-      - name: Download channels from API
+      - uses: actions/setup-node@v2
+        if: ${{ !env.ACT }}
+        with:
+          node-version: '14'
+          cache: 'npm'
+      - name: Download data from API
         run: |
           mkdir -p scripts/data
           curl -L -o scripts/data/channels.json https://iptv-org.github.io/api/channels.json
@@ -17,23 +22,18 @@ jobs:
           curl -L -o scripts/data/languages.json https://iptv-org.github.io/api/languages.json
           curl -L -o scripts/data/regions.json https://iptv-org.github.io/api/regions.json
           curl -L -o scripts/data/guides.json https://iptv-org.github.io/api/guides.json
-      - uses: actions/setup-node@v2
-        if: ${{ !env.ACT }}
+      - uses: actions/upload-artifact@v2
         with:
-          node-version: '14'
-          cache: 'npm'
+          name: data
+          path: scripts/data
       - run: npm install
-      - run: node scripts/commands/create-database.js
-      - run: node scripts/commands/create-matrix.js
+      - run: node scripts/commands/database/create.js
+      - run: node scripts/commands/database/matrix.js
         id: create-matrix
       - uses: actions/upload-artifact@v2
         with:
           name: database
           path: scripts/database
-      - uses: actions/upload-artifact@v2
-        with:
-          name: data
-          path: scripts/data
     outputs:
       matrix: ${{ steps.create-matrix.outputs.matrix }}
   load:
@@ -57,7 +57,7 @@ jobs:
         with:
           node-version: '14'
       - run: npm install
-      - run: node scripts/commands/load-cluster.js --cluster-id=${{ matrix.cluster_id }}
+      - run: node scripts/commands/stream/load.js --cluster-id=${{ matrix.cluster_id }}
       - uses: actions/upload-artifact@v2
         with:
           name: logs
@@ -73,6 +73,10 @@ jobs:
       - run: git config user.email '84861620+iptv-bot[bot]@users.noreply.github.com'
       - run: git checkout -b ${{ steps.create-branch-name.outputs.branch_name }}
       - run: curl -L -o scripts/data/codes.json https://iptv-org.github.io/epg/codes.json
+      - uses: actions/setup-node@v2
+        if: ${{ !env.ACT }}
+        with:
+          node-version: '14'
       - uses: actions/download-artifact@v2
         with:
           name: database
@@ -85,25 +89,21 @@ jobs:
         with:
           name: logs
           path: scripts/logs
-      - uses: actions/setup-node@v2
-        if: ${{ !env.ACT }}
-        with:
-          node-version: '14'
       - run: npm install
-      - run: node scripts/commands/save-results.js
+      - run: node scripts/commands/database/update.js
       - uses: actions/upload-artifact@v2
         with:
           name: database
           path: scripts/database
-      - run: node scripts/commands/update-playlists.js
+      - run: node scripts/commands/playlist/update.js
       - run: git add channels/*
      - run: git commit -m "[Bot] Update playlists"
-      - run: node scripts/commands/generate-playlists.js
+      - run: node scripts/commands/playlist/generate.js
       - uses: actions/upload-artifact@v2
         with:
           name: logs
           path: scripts/logs
-      - run: node scripts/commands/update-readme.js
+      - run: node scripts/commands/readme/update.js
       - run: git add README.md
       - run: git commit -m "[Bot] Update README.md"
       - run: git push -u origin ${{ steps.create-branch-name.outputs.branch_name }}
diff --git a/package.json b/package.json
index 65eb7539f2..665a220308 100644
--- a/package.json
+++ b/package.json
@@ -3,7 +3,12 @@
   "scripts": {
     "validate": "node scripts/commands/validate.js",
     "lint": "npx m3u-linter -c m3u-linter.json",
-    "test": "jest --runInBand"
+    "test": "jest --runInBand",
+    "test:commands": "jest --runInBand -- commands",
+    "test:commands:database": "jest --runInBand -- database",
+    "test:commands:playlist": "jest --runInBand -- playlist",
"test:commands:readme": "jest --runInBand -- readme", + "test:commands:stream": "jest --runInBand -- stream" }, "jest": { "testRegex": "tests/(.*?/)?.*test.js$", diff --git a/scripts/.gitignore b/scripts/.gitignore index 001a2070d1..5292519a25 100644 --- a/scripts/.gitignore +++ b/scripts/.gitignore @@ -1,2 +1 @@ -logs/ -channels.db \ No newline at end of file +logs/ \ No newline at end of file diff --git a/scripts/commands/.gitignore b/scripts/commands/.gitignore deleted file mode 100644 index 2d59da2985..0000000000 --- a/scripts/commands/.gitignore +++ /dev/null @@ -1 +0,0 @@ -export.js \ No newline at end of file diff --git a/scripts/commands/cleanup-database.js b/scripts/commands/cleanup-database.js deleted file mode 100644 index 2bc24f0d5b..0000000000 --- a/scripts/commands/cleanup-database.js +++ /dev/null @@ -1,25 +0,0 @@ -const { db, logger } = require('../core') - -async function main() { - logger.info(`loading streams...`) - await db.streams.load() - let streams = await db.streams.find({}) - - logger.info(`removing broken links...`) - let removed = 0 - const buffer = {} - for (const stream of streams) { - const duplicate = buffer[stream.channel_id] - if (duplicate && ['offline', 'timeout'].includes(stream.status.code)) { - await db.streams.remove({ _id: stream._id }) - removed++ - } else { - buffer[stream.channel_id] = stream - } - } - db.streams.compact() - - logger.info(`removed ${removed} links`) -} - -main() diff --git a/scripts/commands/create-database.js b/scripts/commands/create-database.js deleted file mode 100644 index 5d170ef050..0000000000 --- a/scripts/commands/create-database.js +++ /dev/null @@ -1,82 +0,0 @@ -const { db, file, parser, store, logger, id } = require('../core') -const { program } = require('commander') -const _ = require('lodash') - -const options = program - .option( - '--max-clusters ', - 'Set maximum number of clusters', - parser.parseNumber, - 256 - ) - .option('--input-dir ', 'Set path to input directory', 'channels') - .parse(process.argv) - .opts() - -async function main() { - logger.info('starting...') - logger.info(`number of clusters: ${options.maxClusters}`) - - await saveToDatabase(await findStreams()) - - logger.info('done') -} - -main() - -async function findStreams() { - logger.info(`looking for streams...`) - - await db.streams.load() - const files = await file.list(`${options.inputDir}/**/*.m3u`) - const streams = [] - for (const filepath of files) { - const items = await parser.parsePlaylist(filepath) - for (const item of items) { - item.filepath = filepath - streams.push(item) - } - } - logger.info(`found ${streams.length} streams`) - - return streams -} - -async function saveToDatabase(streams = []) { - logger.info('saving to the database...') - - await db.streams.reset() - const chunks = split(_.shuffle(streams), options.maxClusters) - for (const [i, chunk] of chunks.entries()) { - for (const item of chunk) { - const stream = store.create() - stream.set('channel_id', { channel_id: item.tvg.id }) - stream.set('channel_name', { title: item.name }) - stream.set('filepath', { filepath: item.filepath }) - stream.set('resolution', { title: item.name }) - stream.set('status', { title: item.name }) - stream.set('url', { url: item.url }) - stream.set('http', { http: item.http }) - stream.set('is_broken', { status: stream.get('status') }) - stream.set('updated', { updated: false }) - stream.set('cluster_id', { cluster_id: i + 1 }) - - if (!stream.get('channel_id')) { - const channel_id = id.generate(item.name, item.filepath) - - 
-        stream.set('channel_id', { channel_id })
-        stream.set('updated', { updated: true })
-      }
-
-      await db.streams.insert(stream.data())
-    }
-  }
-}
-
-function split(arr, n) {
-  let result = []
-  for (let i = n; i > 0; i--) {
-    result.push(arr.splice(0, Math.ceil(arr.length / i)))
-  }
-  return result
-}
diff --git a/scripts/commands/create-matrix.js b/scripts/commands/create-matrix.js
deleted file mode 100644
index 486111db6a..0000000000
--- a/scripts/commands/create-matrix.js
+++ /dev/null
@@ -1,16 +0,0 @@
-const { logger, db } = require('../core')
-
-async function main() {
-  await db.streams.load()
-  const docs = await db.streams.find({}).sort({ cluster_id: 1 })
-  const cluster_id = docs.reduce((acc, curr) => {
-    if (!acc.includes(curr.cluster_id)) acc.push(curr.cluster_id)
-    return acc
-  }, [])
-
-  const matrix = { cluster_id }
-  const output = `::set-output name=matrix::${JSON.stringify(matrix)}`
-  logger.info(output)
-}
-
-main()
diff --git a/scripts/commands/generate-playlists.js b/scripts/commands/playlist/generate.js
similarity index 97%
rename from scripts/commands/generate-playlists.js
rename to scripts/commands/playlist/generate.js
index 2d9da565e4..7cb1e34412 100644
--- a/scripts/commands/generate-playlists.js
+++ b/scripts/commands/playlist/generate.js
@@ -1,4 +1,4 @@
-const { db, generator, api, logger, file } = require('../core')
+const { db, generator, api, logger, file } = require('../../core')
 const _ = require('lodash')
 
 async function main() {
diff --git a/scripts/commands/update-playlists.js b/scripts/commands/playlist/update.js
similarity index 82%
rename from scripts/commands/update-playlists.js
rename to scripts/commands/playlist/update.js
index 0196b4a768..d613eb5f53 100644
--- a/scripts/commands/update-playlists.js
+++ b/scripts/commands/playlist/update.js
@@ -1,7 +1,7 @@
-const _ = require('lodash')
+const { create: createPlaylist } = require('../../core/playlist')
+const { db, logger, file } = require('../../core')
 const { orderBy } = require('natural-orderby')
-const { create: createPlaylist } = require('../core/playlist')
-const { db, logger, file } = require('../core')
+const _ = require('lodash')
 
 async function main() {
   await db.streams.load()
diff --git a/scripts/commands/validate.js b/scripts/commands/playlist/validate.js
similarity index 96%
rename from scripts/commands/validate.js
rename to scripts/commands/playlist/validate.js
index 053f670684..825ea969a9 100644
--- a/scripts/commands/validate.js
+++ b/scripts/commands/playlist/validate.js
@@ -1,4 +1,4 @@
-const { file, logger, api, parser, blocklist } = require('../core')
+const { file, logger, api, parser, blocklist } = require('../../core')
 const { program } = require('commander')
 const chalk = require('chalk')
 
diff --git a/scripts/commands/update-readme.js b/scripts/commands/readme/update.js
similarity index 96%
rename from scripts/commands/update-readme.js
rename to scripts/commands/readme/update.js
index a3560cac1c..b197c5f9b7 100644
--- a/scripts/commands/update-readme.js
+++ b/scripts/commands/readme/update.js
@@ -1,5 +1,5 @@
-const { file, markdown, parser, logger, api } = require('../core')
-const { create: createTable } = require('../core/table')
+const { file, markdown, parser, logger, api } = require('../../core')
+const { create: createTable } = require('../../core/table')
 const { program } = require('commander')
 
 const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs/generators'
diff --git a/scripts/commands/save-results.js b/scripts/commands/save-results.js
deleted file mode 100644
index 93b2be85c9..0000000000
--- a/scripts/commands/save-results.js
+++ /dev/null
@@ -1,176 +0,0 @@
-const _ = require('lodash')
-const statuses = require('../data/statuses')
-const { db, store, parser, file, logger } = require('../core')
-
-const items = []
-
-const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs/load-cluster'
-
-async function main() {
-  let streams = await loadStreams()
-  const results = await loadResults()
-  const origins = await findOrigins(results)
-  streams = await updateStreams(streams, results, origins)
-
-  await updateDatabase(streams)
-}
-
-main()
-
-async function loadStreams() {
-  logger.info('loading streams...')
-
-  await db.streams.load()
-  const streams = await db.streams.find({})
-
-  logger.info(`found ${streams.length} streams`)
-
-  return streams
-}
-
-async function loadResults() {
-  logger.info('loading results from logs...')
-
-  const results = {}
-  const files = await file.list(`${LOGS_DIR}/cluster_*.log`)
-  for (const filepath of files) {
-    const parsed = await parser.parseLogs(filepath)
-    for (const item of parsed) {
-      results[item._id] = item
-    }
-  }
-
-  logger.info(`found ${Object.values(results).length} results`)
-
-  return results
-}
-
-async function findOrigins(results = {}) {
-  logger.info('searching for stream origins...')
-
-  const origins = {}
-  for (const { error, requests } of Object.values(results)) {
-    if (error || !Array.isArray(requests) || !requests.length) continue
-
-    let origin = requests.shift()
-    origin = new URL(origin.url)
-    for (const request of requests) {
-      const curr = new URL(request.url)
-      const key = curr.href.replace(/(^\w+:|^)/, '')
-      if (!origins[key] && curr.host === origin.host) {
-        origins[key] = origin.href
-      }
-    }
-  }
-
-  logger.info(`found ${_.uniq(Object.values(origins)).length} origins`)
-
-  return origins
-}
-
-async function updateStreams(items = [], results = {}, origins = {}) {
-  logger.info('updating streams...')
-
-  let updated = 0
-  const output = []
-  for (const item of items) {
-    const stream = store.create(item)
-    const result = results[item._id]
-
-    if (result) {
-      const { error, streams, requests } = result
-      const resolution = parseResolution(streams)
-      const origin = findOrigin(requests, origins)
-      let status = parseStatus(error)
-
-      if (status) {
-        const prevStatus = item.status
-        if (prevStatus.code === 'not_247')
-          // not_247 -> * = not_247
-          status = item.status
-        else if (prevStatus.code === 'geo_blocked')
-          // geo_blocked -> * = geo_blocked
-          status = item.status
-        else if (status.code === 'geo_blocked')
-          // * -> geo_blocked = *
-          status = item.status
-        else if (prevStatus.code === 'offline' && status.code === 'online')
-          // offline -> online = not_247
-          status = statuses['not_247']
-
-        stream.set('status', { status })
-        stream.set('is_broken', { status: stream.get('status') })
-      }
-
-      if (resolution) {
-        stream.set('resolution', { resolution })
-      }
-
-      if (origin) {
-        stream.set('url', { url: origin })
-      }
-    }
-
-    if (stream.changed) {
-      stream.set('updated', true)
-      output.push(stream.data())
-      updated++
-    }
-  }
-
-  logger.info(`updated ${updated} streams`)
-
-  return output
-}
-
-async function updateDatabase(streams = []) {
-  logger.info('updating database...')
-
-  for (const stream of streams) {
-    await db.streams.update({ _id: stream._id }, stream)
-  }
-  db.streams.compact()
-
-  logger.info('done')
-}
-
-function findOrigin(requests = [], origins = {}) {
-  if (origins && Array.isArray(requests)) {
-    requests = requests.map(r => r.url.replace(/(^\w+:|^)/, ''))
-    for (const url of requests) {
-      if (origins[url]) {
-        return origins[url]
-      }
-    }
-  }
-
-  return null
-}
-
-function parseResolution(streams) {
-  const resolution = streams
-    .filter(s => s.codec_type === 'video')
-    .reduce(
-      (acc, curr) => {
-        if (curr.height > acc.height) return { width: curr.width, height: curr.height }
-        return acc
-      },
-      { width: 0, height: 0 }
-    )
-
-  if (resolution.width > 0 && resolution.height > 0) return resolution
-  return null
-}
-
-function parseStatus(error) {
-  if (error) {
-    if (error.includes('timed out')) {
-      return statuses['timeout']
-    } else if (error.includes('403')) {
-      return statuses['geo_blocked']
-    }
-    return statuses['offline']
-  }
-
-  return statuses['online']
-}
diff --git a/scripts/commands/load-cluster.js b/scripts/commands/stream/load.js
similarity index 94%
rename from scripts/commands/load-cluster.js
rename to scripts/commands/stream/load.js
index 57629c4f17..de15bde440 100644
--- a/scripts/commands/load-cluster.js
+++ b/scripts/commands/stream/load.js
@@ -1,5 +1,5 @@
+const { db, logger, timer, checker, store, file, parser } = require('../../core')
 const { program } = require('commander')
-const { db, logger, timer, checker, store, file, parser } = require('../core')
 
 const options = program
   .requiredOption('-c, --cluster-id <cluster-id>', 'The ID of cluster to load', parser.parseNumber)
@@ -15,7 +15,7 @@ const config = {
   debug: options.debug
 }
 
-const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs/load-cluster'
+const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs/stream/load'
 
 async function main() {
   logger.info('starting...')
diff --git a/tests/__data__/expected/logs/load-cluster/cluster_1.log b/tests/__data__/expected/logs/stream/load/cluster_1.log
similarity index 100%
rename from tests/__data__/expected/logs/load-cluster/cluster_1.log
rename to tests/__data__/expected/logs/stream/load/cluster_1.log
diff --git a/tests/__data__/input/logs/load-cluster/cluster_1.log b/tests/__data__/input/logs/stream/load/cluster_1.log
similarity index 100%
rename from tests/__data__/input/logs/load-cluster/cluster_1.log
rename to tests/__data__/input/logs/stream/load/cluster_1.log
diff --git a/tests/commands/cleanup-database.test.js b/tests/commands/cleanup-database.test.js
deleted file mode 100644
index 7bc6ff5e44..0000000000
--- a/tests/commands/cleanup-database.test.js
+++ /dev/null
@@ -1,37 +0,0 @@
-const fs = require('fs-extra')
-const path = require('path')
-const { execSync } = require('child_process')
-
-beforeEach(() => {
-  fs.emptyDirSync('tests/__data__/output')
-  fs.copyFileSync(
-    'tests/__data__/input/database/cleanup-database.streams.db',
-    'tests/__data__/output/streams.db'
-  )
-
-  const stdout = execSync(
-    'DB_DIR=tests/__data__/output node scripts/commands/cleanup-database.js',
-    {
-      encoding: 'utf8'
-    }
-  )
-})
-
-it('can remove broken links from database', () => {
-  expect(content('tests/__data__/output/streams.db')).toEqual(
-    content('tests/__data__/expected/database/cleanup-database.streams.db')
-  )
-})
-
-function content(filepath) {
-  const data = fs.readFileSync(path.resolve(filepath), {
-    encoding: 'utf8'
-  })
-
-  return data
-    .split('\n')
-    .filter(l => l)
-    .map(l => {
-      return JSON.parse(l)
-    })
-}
diff --git a/tests/commands/create-database.test.js b/tests/commands/create-database.test.js
deleted file mode 100644
index c5c1dca2c2..0000000000
--- a/tests/commands/create-database.test.js
+++ /dev/null
@@ -1,47 +0,0 @@
-const fs = require('fs-extra')
-const path = require('path')
-const { execSync } = require('child_process')
-
-beforeEach(() => {
-  fs.emptyDirSync('tests/__data__/output')
-
-  const stdout = execSync(
-    'DB_DIR=tests/__data__/output/database node scripts/commands/create-database.js --input-dir=tests/__data__/input/channels --max-clusters=1',
-    { encoding: 'utf8' }
-  )
-})
-
-it('can create database', () => {
-  let output = content('tests/__data__/output/database/streams.db')
-  let expected = content('tests/__data__/expected/database/streams.db')
-
-  output = output.map(i => {
-    i._id = null
-    return i
-  })
-  expected = expected.map(i => {
-    i._id = null
-    return i
-  })
-
-  expect(output).toEqual(
-    expect.arrayContaining([
-      expect.objectContaining(expected[0]),
-      expect.objectContaining(expected[1]),
-      expect.objectContaining(expected[2])
-    ])
-  )
-})
-
-function content(filepath) {
-  const data = fs.readFileSync(path.resolve(filepath), {
-    encoding: 'utf8'
-  })
-
-  return data
-    .split('\n')
-    .filter(l => l)
-    .map(l => {
-      return JSON.parse(l)
-    })
-}
diff --git a/tests/commands/create-matrix.test.js b/tests/commands/create-matrix.test.js
deleted file mode 100644
index 736e633a89..0000000000
--- a/tests/commands/create-matrix.test.js
+++ /dev/null
@@ -1,16 +0,0 @@
-const fs = require('fs-extra')
-const path = require('path')
-const { execSync } = require('child_process')
-
-beforeEach(() => {
-  fs.emptyDirSync('tests/__data__/output')
-
-  fs.copyFileSync('tests/__data__/input/database/streams.db', 'tests/__data__/output/streams.db')
-})
-
-it('can create valid matrix', () => {
-  const result = execSync('DB_DIR=tests/__data__/output node scripts/commands/create-matrix.js', {
-    encoding: 'utf8'
-  })
-  expect(result).toBe('::set-output name=matrix::{"cluster_id":[1,3]}\n')
-})
diff --git a/tests/commands/generate-playlists.test.js b/tests/commands/playlist/generate.test.js
similarity index 79%
rename from tests/commands/generate-playlists.test.js
rename to tests/commands/playlist/generate.test.js
index b322a5750b..055b84e0c2 100644
--- a/tests/commands/generate-playlists.test.js
+++ b/tests/commands/playlist/generate.test.js
@@ -1,37 +1,35 @@
+const { execSync } = require('child_process')
 const fs = require('fs-extra')
 const path = require('path')
 const glob = require('glob')
-const { execSync } = require('child_process')
 
 beforeEach(() => {
   fs.emptyDirSync('tests/__data__/output')
   fs.copyFileSync(
-    'tests/__data__/input/database/generate-playlists.streams.db',
+    'tests/__data__/input/database/playlist-generate.streams.db',
     'tests/__data__/output/streams.db'
   )
 
   const stdout = execSync(
-    'DB_DIR=tests/__data__/output DATA_DIR=tests/__data__/input/data PUBLIC_DIR=tests/__data__/output/.gh-pages LOGS_DIR=tests/__data__/output/logs/generators node --trace-warnings scripts/commands/generate-playlists.js',
+    'DB_DIR=tests/__data__/output DATA_DIR=tests/__data__/input/data PUBLIC_DIR=tests/__data__/output/.gh-pages LOGS_DIR=tests/__data__/output/logs/generators node --trace-warnings scripts/commands/playlist/generate.js',
     { encoding: 'utf8' }
   )
 })
 
-it('can generate playlists', () => {
-  const files = glob
+it('can generate playlists and logs', () => {
+  const playlists = glob
     .sync('tests/__data__/expected/.gh-pages/**/*.m3u')
     .map(f => f.replace('tests/__data__/expected/', ''))
 
-  files.forEach(filepath => {
+  playlists.forEach(filepath => {
     expect(content(`output/${filepath}`), filepath).toBe(content(`expected/${filepath}`))
   })
-})
 
-it('can generate logs', () => {
-  const files = glob
+  const logs = glob
     .sync('tests/__data__/expected/logs/generators/*.log')
     .map(f => f.replace('tests/__data__/expected/', ''))
 
-  files.forEach(filepath => {
+  logs.forEach(filepath => {
     expect(content(`output/${filepath}`), filepath).toBe(content(`expected/${filepath}`))
   })
 })
diff --git a/tests/commands/update-playlists.test.js b/tests/commands/playlist/update.test.js
similarity index 83%
rename from tests/commands/update-playlists.test.js
rename to tests/commands/playlist/update.test.js
index dbf8db085d..3ed52ad641 100644
--- a/tests/commands/update-playlists.test.js
+++ b/tests/commands/playlist/update.test.js
@@ -7,12 +7,9 @@ beforeEach(() => {
   fs.emptyDirSync('tests/__data__/output')
   fs.copyFileSync('tests/__data__/input/database/streams.db', 'tests/__data__/output/streams.db')
 
-  const stdout = execSync(
-    'DB_DIR=tests/__data__/output node scripts/commands/update-playlists.js',
-    {
-      encoding: 'utf8'
-    }
-  )
+  const stdout = execSync('DB_DIR=tests/__data__/output node scripts/commands/playlist/update.js', {
+    encoding: 'utf8'
+  })
 })
 
 it('can update playlists', () => {
diff --git a/tests/commands/validate.test.js b/tests/commands/playlist/validate.test.js
similarity index 70%
rename from tests/commands/validate.test.js
rename to tests/commands/playlist/validate.test.js
index e90acdb656..103a18363a 100644
--- a/tests/commands/validate.test.js
+++ b/tests/commands/playlist/validate.test.js
@@ -2,9 +2,12 @@ const { execSync } = require('child_process')
 
 it('show error if channel name in the blocklist', () => {
   try {
-    execSync('node scripts/commands/validate.js tests/__data__/input/channels/us_blocked.m3u', {
-      encoding: 'utf8'
-    })
+    execSync(
+      'DATA_DIR=tests/__data__/input/data node scripts/commands/playlist/validate.js tests/__data__/input/channels/us_blocked.m3u',
+      {
+        encoding: 'utf8'
+      }
+    )
   } catch (err) {
     expect(err.status).toBe(1)
     expect(err.stdout).toBe(
@@ -15,7 +18,7 @@
 
 it('show warning if channel has wrong id', () => {
   const stdout = execSync(
-    'node scripts/commands/validate.js tests/__data__/input/channels/wrong_id.m3u',
+    'DATA_DIR=tests/__data__/input/data node scripts/commands/playlist/validate.js tests/__data__/input/channels/wrong_id.m3u',
     {
       encoding: 'utf8'
     }
diff --git a/tests/commands/update-readme.test.js b/tests/commands/readme/update.test.js
similarity index 90%
rename from tests/commands/update-readme.test.js
rename to tests/commands/readme/update.test.js
index 029e45fc7e..0124e8e7f7 100644
--- a/tests/commands/update-readme.test.js
+++ b/tests/commands/readme/update.test.js
@@ -1,12 +1,12 @@
+const { execSync } = require('child_process')
 const fs = require('fs-extra')
 const path = require('path')
-const { execSync } = require('child_process')
 
 beforeEach(() => {
   fs.emptyDirSync('tests/__data__/output')
 
   const stdout = execSync(
-    'DATA_DIR=tests/__data__/input/data LOGS_DIR=tests/__data__/input/logs/generators node scripts/commands/update-readme.js --config=tests/__data__/input/_readme.json',
+    'DATA_DIR=tests/__data__/input/data LOGS_DIR=tests/__data__/input/logs/generators node scripts/commands/readme/update.js --config=tests/__data__/input/_readme.json',
     { encoding: 'utf8' }
   )
 })
diff --git a/tests/commands/save-results.test.js b/tests/commands/save-results.test.js
deleted file mode 100644
index b5e3744525..0000000000
--- a/tests/commands/save-results.test.js
+++ /dev/null
@@ -1,35 +0,0 @@
-const fs = require('fs-extra')
-const path = require('path')
-const { execSync } = require('child_process')
-
-beforeEach(() => {
-  fs.emptyDirSync('tests/__data__/output')
-  fs.copyFileSync(
-    'tests/__data__/input/database/save-results.streams.db',
-    'tests/__data__/output/streams.db'
-  )
-
-  const stdout = execSync(
-    'DB_DIR=tests/__data__/output LOGS_DIR=tests/__data__/input/logs/load-cluster node scripts/commands/save-results.js',
-    { encoding: 'utf8' }
-  )
-})
-
-it('can save results', () => {
-  expect(content('tests/__data__/output/streams.db')).toEqual(
-    content('tests/__data__/expected/database/save-results.streams.db')
-  )
-})
-
-function content(filepath) {
-  const data = fs.readFileSync(path.resolve(filepath), {
-    encoding: 'utf8'
-  })
-
-  return data
-    .split('\n')
-    .filter(l => l)
-    .map(l => {
-      return JSON.parse(l)
-    })
-}
diff --git a/tests/commands/load-cluster.test.js b/tests/commands/stream/load.test.js
similarity index 72%
rename from tests/commands/load-cluster.test.js
rename to tests/commands/stream/load.test.js
index be92dfa426..4c08b01a93 100644
--- a/tests/commands/load-cluster.test.js
+++ b/tests/commands/stream/load.test.js
@@ -1,20 +1,20 @@
+const { execSync } = require('child_process')
 const fs = require('fs-extra')
 const path = require('path')
-const { execSync } = require('child_process')
 
 beforeEach(() => {
   fs.emptyDirSync('tests/__data__/output')
   fs.copyFileSync('tests/__data__/input/database/streams.db', 'tests/__data__/output/streams.db')
 
   const stdout = execSync(
-    'DB_DIR=tests/__data__/output LOGS_DIR=tests/__data__/output/logs/load-cluster node scripts/commands/load-cluster.js --cluster-id=1 --timeout=1',
+    'DB_DIR=tests/__data__/output LOGS_DIR=tests/__data__/output/logs/stream/load node scripts/commands/stream/load.js --cluster-id=1 --timeout=1',
     { encoding: 'utf8' }
   )
 })
 
 it('return results', () => {
-  expect(content('tests/__data__/output/logs/load-cluster/cluster_1.log')).toEqual(
-    content('tests/__data__/expected/logs/load-cluster/cluster_1.log')
+  expect(content('tests/__data__/output/logs/stream/load/cluster_1.log')).toEqual(
+    content('tests/__data__/expected/logs/stream/load/cluster_1.log')
   )
 })