Merge pull request #6439 from iptv-org/patch-02.2022

Patch 02.2022
This commit is contained in:
Aleksandr Statciuk 2022-02-21 14:45:44 +03:00 committed by GitHub
commit a655c15189
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
426 changed files with 5659 additions and 39721 deletions

View File

@ -1,58 +1,49 @@
name: 📺 Channel Request
description: Request to add a channel into the playlist
title: "Add: "
labels: [ "channel request" ]
title: 'Add: '
labels: ['channel request']
body:
- type: markdown
attributes:
value: |
Please fill out the issue template as much as you can so we could efficiently process your request
**IMPORTANT**: An issue may contain a request for only one channel, otherwise it will be closed
Please fill out the issue template as much as you can so we could efficiently process your request
- type: input
id: name
attributes:
label: Channel Name
description: Full name of the channel. It is recommended to use the name listed on [lyngsat](https://www.lyngsat.com/search.html) or [wikipedia](https://www.wikipedia.org/) if possible.
placeholder: 'Fox Life Russia'
validations:
required: true
- type: input
id: origin
attributes:
label: Country
description: Country from which the channel originates
validations:
required: true
- type: input
id: lang
attributes:
label: Language
validations:
required: true
- type: input
id: source
attributes:
label: Website
description: Where did you find the broadcast?
placeholder: 'ex. https://www.filmon.com/channel/strange-paradise'
placeholder: 'BBC America East'
validations:
required: true
- type: input
attributes:
label: Channel ID
description: Unique channel ID from iptv-org/database. A complete list of supported channels can be found on [iptv-org.github.io](https://iptv-org.github.io/).
placeholder: 'BBCAmericaEast.us'
- type: input
attributes:
label: Website
description: Where did you find the broadcast?
placeholder: 'https://example.com/live-tv'
validations:
required: true
- type: input
id: stream
attributes:
label: Stream URL
description: Link to a stream in m3u8 format
placeholder: 'ex. https://lnc-kdfw-fox-aws.tubi.video/index.m3u8'
- type: input
id: notes
placeholder: 'https://example.com/playlist.m3u8'
- type: textarea
attributes:
label: Notes
placeholder: 'Anything else we should know about this broadcast? Is it 24/7?'
description: 'Anything else we should know about this broadcast?'
- type: checkboxes
attributes:
label: Please confirm the following
options:
- label: I have read [Contributing Guide](https://github.com/iptv-org/iptv/blob/master/CONTRIBUTING.md#request-a-channel)

View File

@ -1,61 +1,61 @@
name: 🛠 Broken Stream
description: Report a broken stream
title: "Replace: "
labels: [ "broken stream" ]
title: 'Replace: '
labels: ['broken stream']
body:
- type: markdown
attributes:
value: |
Please fill out the issue template as much as you can so we could efficiently process your request
**IMPORTANT**: An issue may contain a request for only one channel, otherwise it will be closed
Please fill out the issue template as much as you can so we could efficiently process your request
- type: input
id: name
attributes:
label: Channel Name
description: Full name of the channel. Please use the exact name as it appears in the playlist.
placeholder: 'Fox Life Russia'
label: Stream Title
description: Please use the exact title as it appears in the playlist.
placeholder: 'BBC America East (720p) [Geo-blocked]'
validations:
required: true
- type: dropdown
id: reason
attributes:
label: What happened to the stream?
options:
- It has disappeared from the playlist
- It is stuck at a single frame/the same segment
- I see visual artifacts
- It keeps buffering to the point of being unplayable
- I see visual artifacts
- Other
validations:
required: true
- type: input
id: playlist
attributes:
label: Playlist
description: Which playlist did you get the channel from?
placeholder: 'ex. https://iptv-org.github.io/iptv/countries/au.m3u'
placeholder: 'https://iptv-org.github.io/iptv/countries/au.m3u'
validations:
required: true
- type: input
id: link
attributes:
label: Broken Link
description: Please specify the broken link from a playlist if you can
placeholder: 'ex. https://lnc-kdfw-fox-aws.tubi.video/index.m3u8'
placeholder: 'https://lnc-kdfw-fox-aws.tubi.video/index.m3u8'
- type: input
id: alt
attributes:
label: Possible Replacement
description: If you know an alternate non-protected source or a way to fix the current stream, please let us know
description: If you know an alternate source or a way to fix the current stream, please let us know
placeholder: 'ex. https://lnc-kdfw-fox-aws.tubi.video/index.m3u8'
- type: input
id: notes
attributes:
label: Notes
placeholder: 'Anything else we should know?'
- type: checkboxes
attributes:
label: Please confirm the following
options:
- label: I have read [Contributing Guide](https://github.com/iptv-org/iptv/blob/master/CONTRIBUTING.md#report-a-broken-stream)

View File

@ -1,7 +1,7 @@
name: 🐞 Bug Report
description: Report an error in this repository
title: "Fix: "
labels: [ "bug" ]
title: 'Fix: '
labels: ['bug']
assignees:
- freearhey
@ -9,14 +9,11 @@ body:
- type: markdown
attributes:
value: |
This form is **ONLY** intended for auto-update, channel sorting and other automation scripts related issues.
If you're experiencing problems viewing a channel, **this is not the right form**. Please fill a [Broken stream](https://github.com/iptv-org/iptv/issues/new?assignees=&labels=broken+stream&template=-----broken-stream.yml&title=Fix%3A+) form instead.
Please describe the error in as much detail as possible so that we can fix it quickly.
This form is only for reporting bugs in the auto-update, channel sorting and other automation scripts. If you're experiencing problems viewing a channel please fill a [Broken Stream](https://github.com/iptv-org/iptv/issues/new?assignees=&labels=broken+stream&template=-----broken-stream.yml&title=Fix%3A+) form instead.
- type: textarea
id: bug
attributes:
label: Describe your issue
description: Please describe the error in as much detail as possible so that we can fix it quickly.
validations:
required: true

View File

@ -1,10 +1,15 @@
name: ❌ Removal Request
description: Request to remove a link
description: Request to remove content
title: 'Remove: '
labels: ['removal request']
assignees:
- freearhey
body:
- type: markdown
attributes:
value: |
This form is only for requests from the copyright owner or an agent authorized to act on behalf of the copyright owner. If you're experiencing problems viewing a channel please fill a [Broken Stream](https://github.com/iptv-org/iptv/issues/new?assignees=&labels=broken+stream&template=-----broken-stream.yml&title=Fix%3A+) form instead.
- type: input
attributes:
label: Your full legal name

View File

@ -3,6 +3,6 @@ contact_links:
- name: 💡 Feature Request
url: https://github.com/iptv-org/iptv/discussions/new
about: For any ideas or feature requests
- name: ❓ Ask a question
- name: ❓ Ask a Question
url: https://github.com/iptv-org/iptv/discussions/new
about: Ask questions about this project

View File

@ -13,14 +13,28 @@ jobs:
with:
node-version: '14'
cache: 'npm'
- name: Download data from API
run: |
mkdir -p scripts/data
curl -L -o scripts/data/blocklist.json https://iptv-org.github.io/api/blocklist.json
curl -L -o scripts/data/categories.json https://iptv-org.github.io/api/categories.json
curl -L -o scripts/data/channels.json https://iptv-org.github.io/api/channels.json
curl -L -o scripts/data/countries.json https://iptv-org.github.io/api/countries.json
curl -L -o scripts/data/guides.json https://iptv-org.github.io/api/guides.json
curl -L -o scripts/data/languages.json https://iptv-org.github.io/api/languages.json
curl -L -o scripts/data/regions.json https://iptv-org.github.io/api/regions.json
- uses: actions/upload-artifact@v2
with:
name: data
path: scripts/data
- run: npm install
- run: node scripts/commands/create-database.js
- run: node scripts/commands/create-matrix.js
id: create-matrix
- run: npm run db:create
- uses: actions/upload-artifact@v2
with:
name: database
path: scripts/channels.db
path: scripts/database
- run: npm run db:matrix
id: create-matrix
outputs:
matrix: ${{ steps.create-matrix.outputs.matrix }}
load:
@ -30,19 +44,21 @@ jobs:
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.setup.outputs.matrix) }}
# matrix:
# cluster_id: [1]
steps:
- uses: actions/checkout@v2
- uses: actions/download-artifact@v2
with:
name: database
path: scripts
- uses: FedericoCarboni/setup-ffmpeg@v1
- uses: actions/setup-node@v2
if: ${{ !env.ACT }}
with:
node-version: '14'
- uses: actions/download-artifact@v2
with:
name: database
path: scripts/database
- run: npm install
- run: node scripts/commands/check-streams.js --cluster-id=${{ matrix.cluster_id }}
- run: npm run cluster:load -- --cluster-id=${{ matrix.cluster_id }}
- uses: actions/upload-artifact@v2
with:
name: logs
@ -57,45 +73,72 @@ jobs:
- run: git config user.name 'iptv-bot[bot]'
- run: git config user.email '84861620+iptv-bot[bot]@users.noreply.github.com'
- run: git checkout -b ${{ steps.create-branch-name.outputs.branch_name }}
- run: curl -L -o scripts/data/codes.json https://iptv-org.github.io/epg/codes.json
- uses: actions/download-artifact@v2
with:
name: database
path: scripts
- uses: actions/download-artifact@v2
with:
name: logs
path: scripts/logs
- uses: actions/setup-node@v2
if: ${{ !env.ACT }}
with:
node-version: '14'
- run: npm install
- run: node scripts/commands/update-database.js
- uses: actions/upload-artifact@v2
with:
name: database
path: scripts/channels.db
- run: node scripts/commands/update-playlists.js
- run: git add channels/*
- run: git commit -m "[Bot] Update playlists"
- run: node scripts/commands/generate-playlists.js
- uses: actions/upload-artifact@v2
with:
name: logs
path: scripts/logs
- run: node scripts/commands/update-readme.js
- run: git add README.md
- run: git commit -m "[Bot] Update README.md"
- run: git push -u origin ${{ steps.create-branch-name.outputs.branch_name }}
- uses: tibdex/github-app-token@v1
if: ${{ !env.ACT }}
id: create-app-token
with:
app_id: ${{ secrets.APP_ID }}
private_key: ${{ secrets.APP_PRIVATE_KEY }}
- uses: repo-sync/pull-request@v2
- uses: actions/setup-node@v2
if: ${{ !env.ACT }}
with:
node-version: '14'
- uses: actions/download-artifact@v2
with:
name: data
path: scripts/data
- uses: actions/download-artifact@v2
with:
name: database
path: scripts/database
- uses: actions/download-artifact@v2
with:
name: logs
path: scripts/logs
- run: npm install
- run: npm run db:update
- uses: actions/upload-artifact@v2
with:
name: database
path: scripts/database
- run: npm run playlist:update
- run: npm run playlist:generate
- uses: actions/upload-artifact@v2
with:
name: logs
path: scripts/logs
- uses: JamesIves/github-pages-deploy-action@4.1.1
if: ${{ !env.ACT && github.ref == 'refs/heads/master' }}
with:
branch: gh-pages
folder: .gh-pages
token: ${{ steps.create-app-token.outputs.token }}
git-config-name: iptv-bot[bot]
git-config-email: 84861620+iptv-bot[bot]@users.noreply.github.com
commit-message: '[Bot] Generate playlists'
- run: npm run db:export
- uses: JamesIves/github-pages-deploy-action@4.1.1
if: ${{ !env.ACT && github.ref == 'refs/heads/master' }}
with:
repository-name: iptv-org/api
branch: gh-pages
folder: .api
token: ${{ steps.create-app-token.outputs.token }}
git-config-name: iptv-bot[bot]
git-config-email: 84861620+iptv-bot[bot]@users.noreply.github.com
commit-message: '[Bot] Deploy to iptv-org/api'
clean: false
- run: npm run readme:update
- name: Commit Changes
if: ${{ !env.ACT }}
run: |
git add streams/*
git commit -m "[Bot] Update streams"
git add README.md
git commit -m "[Bot] Update README.md"
git push -u origin ${{ steps.create-branch-name.outputs.branch_name }}
- uses: repo-sync/pull-request@v2
if: ${{ !env.ACT && github.ref == 'refs/heads/master' }}
id: pull-request
with:
github_token: ${{ steps.create-app-token.outputs.token }}
@ -107,17 +150,8 @@ jobs:
[1]: https://github.com/iptv-org/iptv/actions/runs/${{ github.run_id }}
- uses: juliangruber/merge-pull-request-action@v1
if: ${{ github.ref == 'refs/heads/master' }}
if: ${{ !env.ACT && github.ref == 'refs/heads/master' }}
with:
github-token: ${{ secrets.PAT }}
number: ${{ steps.pull-request.outputs.pr_number }}
method: squash
- uses: JamesIves/github-pages-deploy-action@4.1.1
if: ${{ github.ref == 'refs/heads/master' }}
with:
branch: gh-pages
folder: .gh-pages
token: ${{ steps.create-app-token.outputs.token }}
git-config-name: iptv-bot[bot]
git-config-email: 84861620+iptv-bot[bot]@users.noreply.github.com
commit-message: '[Bot] Generate playlists'

View File

@ -8,11 +8,26 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- uses: tj-actions/changed-files@v12.2
id: files
with:
files: streams/*.m3u
- uses: actions/setup-node@v2
if: ${{ !env.ACT }}
if: ${{ !env.ACT && steps.files.outputs.any_changed == 'true' }}
with:
node-version: '14'
cache: 'npm'
- run: npm install
- run: npm run lint
- run: npm run validate
- name: download data from api
if: steps.files.outputs.any_changed == 'true'
run: |
mkdir -p scripts/data
curl -L -o scripts/data/blocklist.json https://iptv-org.github.io/api/blocklist.json
curl -L -o scripts/data/channels.json https://iptv-org.github.io/api/channels.json
- name: validate
if: steps.files.outputs.any_changed == 'true'
run: |
npm install
npm run playlist:lint -- ${{ steps.files.outputs.all_changed_files }}
npm run playlist:validate -- ${{ steps.files.outputs.all_changed_files }}

View File

@ -1,39 +0,0 @@
name: cleanup
on:
workflow_dispatch:
jobs:
cleanup:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- run: echo "::set-output name=branch_name::$(date +'bot/cleanup-%s')"
id: create-branch-name
- run: |
git config user.name 'iptv-bot[bot]'
git config user.email '84861620+iptv-bot[bot]@users.noreply.github.com'
- run: git checkout -b ${{ steps.create-branch-name.outputs.branch_name }}
- run: npm install
- run: node scripts/commands/create-database.js
- run: node scripts/commands/cleanup-database.js
- run: node scripts/commands/update-playlists.js
- run: |
git add channels/*
git commit -m "[Bot] Update playlists"
- uses: tibdex/github-app-token@v1
if: ${{ !env.ACT }}
id: create-app-token
with:
app_id: ${{ secrets.APP_ID }}
private_key: ${{ secrets.APP_PRIVATE_KEY }}
- uses: repo-sync/pull-request@v2
if: ${{ github.ref == 'refs/heads/master' }}
id: pull-request
with:
github_token: ${{ steps.create-app-token.outputs.token }}
source_branch: ${{ steps.create-branch-name.outputs.branch_name }}
destination_branch: 'master'
pr_title: '[Bot] Remove broken links'
pr_body: |
This pull request is created by [cleanup][1] workflow.
[1]: https://github.com/iptv-org/iptv/actions/runs/${{ github.run_id }}

1
.gitignore vendored
View File

@ -1,5 +1,4 @@
node_modules
database
.artifacts
.secrets
.actrc

View File

@ -17,7 +17,6 @@
| Kids | Programming targeted to children |
| Legislative | Programming specific to the operation of government |
| Lifestyle | Programs related to health, fitness, leisure, fashion, decor, etc. |
| Local | Channels targeted at a local area or region |
| Movies | Channels that only show movies |
| Music | Programming is music or music related |
| News | Programming is mostly news |
@ -31,4 +30,3 @@
| Travel | Programming is travel related |
| Weather | Programming is focused on weather |
| XXX | Programming is adult oriented and x-rated |
| Other | No category assigned |

View File

@ -1,8 +0,0 @@
## Supported Statuses
| Label | Description |
| ----------- | ------------------------------------------------- |
| Geo-blocked | Channel is only available in selected countries. |
| Not 24/7 | Broadcast is not available 24 hours a day. |
| Timeout | Server does not respond for more than 60 seconds. |
| Offline | The broadcast does not work for any other reason. |

View File

@ -2,23 +2,20 @@
[![auto-update](https://github.com/iptv-org/iptv/actions/workflows/auto-update.yml/badge.svg)](https://github.com/iptv-org/iptv/actions/workflows/auto-update.yml)
Collection of publicly available IPTV channels from all over the world.
Collection of publicly available IPTV (Internet Protocol television) channels from all over the world.
## Usage
To watch IPTV you just need to paste this link `https://iptv-org.github.io/iptv/index.m3u` into any player that supports M3U playlists.
![VLC Network Panel](https://github.com/iptv-org/iptv/raw/master/.readme/preview.png)
![VLC Network Panel](.readme/preview.png)
To watch IPTV, simply insert one of the links below into any player that supports M3U playlists:
Alternatively, you can use one of these playlists:
- `https://iptv-org.github.io/iptv/index.category.m3u` (grouped by category)
- `https://iptv-org.github.io/iptv/index.language.m3u` (grouped by language)
- `https://iptv-org.github.io/iptv/index.country.m3u` (grouped by country)
- `https://iptv-org.github.io/iptv/index.region.m3u` (grouped by region)
- `https://iptv-org.github.io/iptv/index.m3u`
- `https://iptv-org.github.io/iptv/index.nsfw.m3u` (includes adult channels)
Or select one of the playlists from the list below.
- `https://iptv-org.github.io/iptv/index.category.m3u` (grouped by category)
- `https://iptv-org.github.io/iptv/index.country.m3u` (grouped by country)
- `https://iptv-org.github.io/iptv/index.language.m3u` (grouped by language)
- `https://iptv-org.github.io/iptv/index.region.m3u` (grouped by region)
### Playlists by category
@ -42,17 +39,6 @@ Or select one of the playlists from the list below.
</details>
### Playlists by region
<details>
<summary>Expand</summary>
<br>
<!-- prettier-ignore -->
#include "./.readme/_regions.md"
</details>
### Playlists by country
<details>
@ -64,71 +50,32 @@ Or select one of the playlists from the list below.
</details>
## For Developers
In addition to the above methods, you can also get a list of all available channels in JSON format.
To do this, you just have to make a GET request to:
```
https://iptv-org.github.io/iptv/channels.json
```
If successful, you should get the following response:
### Playlists by region
<details>
<summary>Expand</summary>
<br>
```
[
...
{
"name": "CNN",
"logo": "https://i.imgur.com/ilZJT5s.png",
"url": "http://ott-cdn.ucom.am/s27/index.m3u8",
"categories": [
{
"name": "News",
"slug": "news"
}
],
"countries": [
{
"code": "us",
"name": "United States"
},
{
"code": "ca",
"name": "Canada"
}
],
"languages": [
{
"code": "eng",
"name": "English"
}
],
"tvg": {
"id": "cnn.us",
"name": "CNN",
"url": "http://epg.streamstv.me/epg/guide-usa.xml.gz"
}
},
...
]
```
<!-- prettier-ignore -->
#include "./.readme/_regions.md"
</details>
## EPG
Playlists already have a built-in list of EPG, so players that support the `url-tvg` tag should load it automatically. If not, you can find a list of available programs here:
The playlists already contain links to all guides, so players that support the `x-tvg-url` tag should load it automatically. Otherwise, you can choose one of the guides featured in the [iptv-org/epg](https://github.com/iptv-org/epg) repository.
https://github.com/iptv-org/epg
## Database
If you find an error in the description of the channel, please create an issue in the [iptv-org/database](https://github.com/iptv-org/database) repository.
## API
The API documentation can be found in the [iptv-org/api](https://github.com/iptv-org/api) repository.
## Resources
You can find links to various IPTV related resources in this repository [iptv-org/awesome-iptv](https://github.com/iptv-org/awesome-iptv).
Links to other useful IPTV-related resources can be found in the [iptv-org/awesome-iptv](https://github.com/iptv-org/awesome-iptv) repository.
## Contribution
@ -137,3 +84,7 @@ Please make sure to read the [Contributing Guide](CONTRIBUTING.md) before sendin
## Legal
No video files are stored in this repository. The repository simply contains user-submitted links to publicly available video stream URLs, which to the best of our knowledge have been intentionally made publicly available by the copyright holders. If any links in these playlists infringe on your rights as a copyright holder, they may be removed by sending a pull request or opening an issue. However, note that we have **no control** over the destination of the link, and just removing the link from the playlist will not remove its contents from the web. Note that linking does not directly infringe copyright because no copy is made on the site providing the link, and thus this is **not** a valid reason to send a DMCA notice to GitHub. To remove this content from the web, you should contact the web host that's actually hosting the content (**not** GitHub, nor the maintainers of this repository).
## License
[![CC0](http://mirrors.creativecommons.org/presskit/buttons/88x31/svg/cc-zero.svg)](LICENSE)

View File

@ -4,22 +4,22 @@ Before submitting your contribution, please make sure to take a moment and read
- [Issue Reporting Guidelines](#issue-reporting-guidelines)
- [Pull Request Guidelines](#pull-request-guidelines)
- [Channel Description Scheme](#channel-description-scheme)
- [Stream Description Scheme](#stream-description-scheme)
- [Project Structure](#project-structure)
## Issue Reporting Guidelines
### Request a Channel
To request a channel, create an [issue](https://github.com/iptv-org/iptv/issues/new?assignees=&labels=channel+request&template=------channel-request.yml&title=Add%3A+) and complete all details requested. Understand that our community of volunteers will try to help you, but if a public link cannot be found, there is little we can do. (**IMPORTANT:** the issue should contain a request for only one channel, otherwise it will be closed immediately)
To request a channel, create an [issue](https://github.com/iptv-org/iptv/issues/new?labels=channel+request&template=------channel-request.yml&title=Add%3A+) and complete all details requested. Understand that our community of volunteers will try to help you, but if a public link cannot be found, there is little we can do. (**IMPORTANT:** the issue should contain a request for only one channel, otherwise it will be closed immediately)
### Report a Broken Stream
To report a broadcast that is not working, create an [issue](https://github.com/iptv-org/iptv/issues/new?assignees=&labels=broken+stream&template=-----broken-stream.yml&title=Replace%3A+) with a description of the channel. (**IMPORTANT:** an issue should contain a report for only one channel, otherwise it will be closed immediately)
To report a broadcast that is not working, create an [issue](https://github.com/iptv-org/iptv/issues/new?labels=broken+stream&template=-----broken-stream.yml&title=Replace%3A+) with a description of the channel. (**IMPORTANT:** an issue should contain a report for only one channel, otherwise it will be closed immediately)
### Request Channel Removal
### Content Removal Request
Publish your DMCA notice somewhere and send us a link to it through this [form](https://github.com/iptv-org/iptv/issues/new?assignees=&labels=DMCA&template=--remove-channel.yml&title=Remove%3A+).
If you find any content in the repository that you own and you would like us to remove, please create an [issue](https://github.com/iptv-org/iptv/issues/new?assignees=freearhey&labels=removal+request&template=--removal-request.yml&title=Remove%3A+) and provide all necessary information. If the request is granted, the specified content will be removed from the repository within one business day.
## Pull Request Guidelines
@ -31,72 +31,13 @@ If you would like to replace a broken stream or add a new one, please do the fol
- check if the channel is working outside your country by using a VPN or use a service like [streamtest.in](https://streamtest.in/)
- find out from which country the channel is being broadcast. This information can usually be found on [lyngsat.com](https://www.lyngsat.com/search.html) or [wikipedia.org](https://www.wikipedia.org/). If you are unable to determine which country the channel belongs to, add the channel to the `channels/unsorted.m3u` playlist
- find the corresponding [ISO_3166-2 code](https://en.wikipedia.org/wiki/ISO_3166-2) for the country
- open the `/channels` folder and find the file that has the same code in its name and open it
- open the `/streams` folder and find the file that has the same code in its name and open it
- if broken, find the broken link in this file and replace it with a working one
- if new, at the very end of this file add a link to the channel with a description
- if the broadcast is not available outside of a certain country, add the label `[Geo-blocked]` to the end of the channel name and list these countries in the `tvg-country` attribute
- if the broadcast is not available 24 hours a day, add the label `[Not 24/7]`
- commit all changes and send a pull request
### Add a Category to a Channel
- select a channel that does not have a category specified
- find the file that contains the channel. You can use a [GitHub Search](https://github.com/search/advanced?q=CHANNEL_NAME+repo%3Aiptv-org%2Fiptv+path%3A%2Fchannels&type=Code) to do this
- find the desired channel in this file
- specify the appropriate category in the `group-title` attribute. A complete list of supported categories can be found [here](https://github.com/iptv-org/iptv/blob/master/.readme/supported-categories.md)
- commit all changes and send a pull request
### Add a Language to a Channel
- select a channel that does not have a language specified
- find the file that contains the channel. You can use a [GitHub Search](https://github.com/search/advanced?q=CHANNEL_NAME+repo%3Aiptv-org%2Fiptv+path%3A%2Fchannels&type=Code) to do this.
- find the desired channel in this file
- specify the appropriate language in the `tvg-language` attribute. The name of the language must comply with the [ISO 639-3](https://iso639-3.sil.org/code_tables/639/data?title=&field_iso639_cd_st_mmbrshp_639_1_tid=94671&name_3=&field_iso639_element_scope_tid=All&field_iso639_language_type_tid=51&items_per_page=500) standard.
- commit all changes and send a pull request
If a channel is broadcasted in several languages at once, you can specify them all through a semicolon, like this:
```xml
#EXTINF:-1 tvg-language="English;Chinese",CCTV
http://example.com/cctv.m3u8
```
### Add a Country to a Channel
- select a channel that does not have a country specified
- find out in which country the channel is broadcast. This information can usually be found in the channel description on Wikipedia.
- find the [ISO_3166-2 code](https://en.wikipedia.org/wiki/ISO_3166-2) corresponding to the country
- find the file that contains the channel. You can use a [GitHub Search](https://github.com/search/advanced?q=CHANNEL_NAME+repo%3Aiptv-org%2Fiptv+path%3A%2Fchannels&type=Code) to do this.
- find the desired channel in this file
- paste the country ISO_3166-2 code into `tvg-country` attribute of the channel description
- commit all changes and send a pull request
If a channel is broadcasted in several countries at once, you can specify them all through a semicolon, like this:
```xml
#EXTINF:-1 tvg-country="US;CA",CNN
http://example.com/cnn.m3u8
```
If a channel is broadcast for an entire region, you can use one of the [supported region codes](https://github.com/iptv-org/iptv/blob/master/.readme/supported-regions.md) to avoid listing all countries. In this case the channel will be added to the playlists of all countries from that region.
In case the channel is broadcast worldwide you can use the code `INT`:
```xml
#EXTINF:-1 tvg-country="INT",CNN
http://example.com/cnn.m3u8
```
### Sort channels from `channels/unsorted.m3u`
- select any channel from [channels/unsorted.m3u](https://github.com/iptv-org/iptv/blob/master/channels/unsorted.m3u)
- find out the full name of the channel and from which country it is being broadcast. This information can usually be found on [lyngsat.com](https://www.lyngsat.com/search.html) or [wikipedia.org](https://www.wikipedia.org/)
- update the channel name if necessary
- find the corresponding [ISO_3166-2 code](https://en.wikipedia.org/wiki/ISO_3166-2) for the country
- open the `channels/` folder and find a file with the same name as the country code
- at the very end of this file add a link to the channel with a description
- commit all changes and send a pull request
### Update README.md
- open `.readme/template.md`
@ -109,40 +50,34 @@ http://example.com/cnn.m3u8
- make the necessary changes
- commit all changes and send a pull request
## Channel Description Scheme
## Stream Description Scheme
For a channel to be approved, its description must follow this template:
For a stream to be approved, its description must follow this template:
```
#EXTINF:-1 tvg-id="EPG_ID" tvg-country="COUNTRY" tvg-language="LANGUAGE" tvg-logo="LOGO_URL" group-title="CATEGORY",FULL_NAME STREAM_TIME_SHIFT (ALTERNATIVE_NAME) (STREAM_RESOLUTION) [STREAM_STATUS]
#EXTINF:-1 tvg-id="CHANNEL_ID",CHANNEL_NAME (RESOLUTION) [LABEL]
STREAM_URL
```
| Attribute | Description |
| ------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `EPG_ID` | Channel ID that is used to load EPG. The same ID is used in [iptv-org/epg](https://iptv-org.github.io/epg/index.html) to search for the corresponding program. (optional) |
| `COUNTRY` | The code of the country in which the channel is broadcast. The code of the country must conform to the standard [ISO_3166-2](https://en.wikipedia.org/wiki/ISO_3166-2). If the channel is broadcast in several countries you can list them separated by a semicolon. You can also use one of these [region codes](https://github.com/iptv-org/iptv/blob/master/.readme/supported-regions.md). (optional) |
| `LANGUAGE` | Channel language. The name of the language must conform to the standard [ISO 639-3](https://iso639-3.sil.org/code_tables/639/data?title=&field_iso639_cd_st_mmbrshp_639_1_tid=94671&name_3=&field_iso639_element_scope_tid=All&field_iso639_language_type_tid=51&items_per_page=500). If the channel is broadcast in several languages you can list them separated by a semicolon. (optional) |
| `LOGO_URL` | The logo of the channel that will be displayed if the player supports it. Supports files in png, jpeg and gif format. (optional) |
| `CATEGORY` | The category to which the channel belongs. The list of currently supported categories can be found [here](https://github.com/iptv-org/iptv/blob/master/.readme/supported-categories.md). (optional) |
| `FULL_NAME` | Full name of the channel. It is recommended to use the name listed on [lyngsat](https://www.lyngsat.com/search.html) or [wikipedia](https://www.wikipedia.org/) if possible. May contain any characters except round and square brackets. |
| `STREAM_TIME_SHIFT` | Must be specified if the channel is broadcast with a shift in time relative to the main stream. Should only contain a number and a sign. (optional) |
| `ALTERNATIVE_NAME` | Can be used to specify a short name or name in another language. May contain any characters except round and square brackets. (optional) |
| `STREAM_RESOLUTION` | The maximum height of the frame with a "p" at the end. In case of VLC Player this information can be found in `Window > Media Information... > Codec Details`. (optional) |
| `STREAM_STATUS` | Specified if the broadcast for some reason is interrupted or does not work in a particular application. The list of currently supported statuses can be found [here](https://github.com/iptv-org/iptv/blob/master/.readme/supported-statuses.md). (optional) |
| `STREAM_URL` | Channel broadcast URL. |
| Attribute | Description | Required | Valid values |
| -------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------- | --------------------------------------------------------------------------------------------------- |
| `CHANNEL_ID` | Channel ID. | Optional | A full list of supported channels can be found on [iptv-org.github.io](https://iptv-org.github.io/) |
| `CHANNEL_NAME` | Full name of the channel. May contain any characters except: `,`, `(`, `)`, `[`, `]`. It is recommended to use the name listed on [lyngsat](https://www.lyngsat.com/search.html) or [wikipedia](https://www.wikipedia.org/) if possible. | Required | - |
| `RESOLUTION` | Maximum stream resolution. | Optional | `2160p`, `1080p`, `720p`, `480p`, `360p` etc |
| `LABEL` | Specified in cases where the broadcast for some reason may not be available to some users. | Optional | `Geo-blocked` or `Not 24/7` |
| `STREAM_URL` | Stream URL. | Required | - |
Example:
```xml
#EXTINF:-1 tvg-id="ExampleTVPlus3.ua" tvg-country="UA" tvg-language="Ukrainian;Russian" tvg-logo="https://i.imgur.com/bu12f89.png" group-title="Kids",Example TV +3 (Пример ТВ) (720p) [not 24/7]
#EXTINF:-1 tvg-id="ExampleTV.ua",Example TV (720p) [Not 24/7]
https://example.com/playlist.m3u8
```
Also, if necessary, you can specify a custom HTTP User-Agent or Referrer via the `#EXTVLCOPT` tag:
Also, if necessary, you can specify a custom HTTP User-Agent and Referrer via the `#EXTVLCOPT` tag:
```xml
#EXTINF:-1 tvg-id="ExampleTV.us" tvg-country="US" tvg-language="English" tvg-logo="http://example.com/channel-logo.png" group-title="News",Example TV
#EXTINF:-1 tvg-id="ExampleTV.us",Example TV
#EXTVLCOPT:http-referrer=http://example.com/
#EXTVLCOPT:http-user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64)
http://example.com/stream.m3u8
@ -151,24 +86,21 @@ http://example.com/stream.m3u8
## Project Structure
- `.github/`
- `ISSUE_TEMPLATE/`: issue templates for this repository.
- `ISSUE_TEMPLATE/`: issue templates for the repository.
- `workflows/`
- `auto-update.yml`: GitHub workflow that launches playlist updates twice a day (at 0:00 and 12:00 UTC).
- `check.yml`: GitHub workflow that checks every pull request for syntax errors.
- `cleanup.yml`: GitHub workflow that removes broken links by request.
- `validate.yml`: GitHub workflow that compares channel names with the blocklist each time a pull request is made.
- `check.yml`: GitHub workflow that checks every pull request for syntax errors and blocked channels.
- `CODE_OF_CONDUCT.md`: rules you shouldn't break if you don't want to get banned.
- `.readme/`
- `config.json`: config for the `markdown-include` package, which is used to compile everything into one `README.md` file.
- `preview.png`: image displayed in the `README.md`.
- `supported-categories.md`: list of supported categories.
- `supported-statuses.md`: list of supported statuses.
- `supported-regions.md`: list of supported regions.
- `template.md`: template for `README.md`.
- `channels/`: contains all channels broken down by the country from which they are broadcast.
- `scripts/`: contains all the scripts used in GitHub workflows.
- `streams/`: contains all streams broken down by the country from which they are broadcast.
- ...
- `unsorted.m3u`: playlist with channels not yet sorted.
- `scripts/`: contains all the scripts used in GitHub workflows.
- `tests/`: contains tests to check the scripts in the folder above.
- `tests/`: contains tests to check the scripts.
- `CONTRIBUTING.md`: file you are currently reading.
- `README.md`: project description generated from the contents of the `.readme/` folder.

3704
package-lock.json generated

File diff suppressed because it is too large

View File

@ -1,31 +1,46 @@
{
"name": "iptv",
"scripts": {
"validate": "node scripts/commands/validate.js",
"lint": "npx m3u-linter -c m3u-linter.json",
"act:auto-update": "act workflow_dispatch -W .github/workflows/auto-update.yml --artifact-server-path=.artifacts",
"act:check": "act pull_request -W .github/workflows/check.yml",
"db:create": "node scripts/commands/database/create.js",
"db:matrix": "node scripts/commands/database/matrix.js",
"db:update": "node scripts/commands/database/update.js",
"db:export": "node scripts/commands/database/export.js",
"cluster:load": "node scripts/commands/cluster/load.js",
"playlist:validate": "node scripts/commands/playlist/validate.js",
"playlist:generate": "node scripts/commands/playlist/generate.js",
"playlist:update": "node scripts/commands/playlist/update.js",
"playlist:lint": "npx m3u-linter -c m3u-linter.json",
"readme:update": "node scripts/commands/readme/update.js",
"test": "jest --runInBand"
},
"jest": {
"testRegex": "tests/(.*?/)?.*test.js$"
"testRegex": "tests/(.*?/)?.*test.js$",
"setupFilesAfterEnv": [
"@alex_neo/jest-expect-message"
]
},
"author": "Arhey",
"private": true,
"license": "MIT",
"dependencies": {
"chunk": "^0.0.3",
"commander": "^7.0.0",
"crypto": "^1.0.1",
"@alex_neo/jest-expect-message": "^1.0.5",
"chalk": "^4.1.2",
"commander": "^8.3.0",
"dayjs": "^1.10.7",
"fs-extra": "^10.0.0",
"iptv-checker": "^0.22.0",
"iptv-playlist-parser": "^0.10.2",
"jest": "^27.4.3",
"jest": "^27.5.1",
"jest-expect-message": "^1.0.2",
"lodash": "^4.17.21",
"m3u-linter": "^0.3.0",
"markdown-include": "^0.4.3",
"mz": "^2.7.0",
"natural-orderby": "^2.0.3",
"nedb-promises": "^5.0.2",
"normalize-url": "^6.1.0",
"transliteration": "^2.2.0",
"winston": "^3.3.3"
"signale": "^1.4.0",
"transliteration": "^2.2.0"
}
}

4
scripts/.gitignore vendored
View File

@ -1,2 +1,2 @@
logs/
channels.db
/logs/
/database/

View File

@ -1,50 +0,0 @@
const { program } = require('commander')
const { db, logger, timer, checker, store, file, parser } = require('../core')
const options = program
.requiredOption('-c, --cluster-id <cluster-id>', 'The ID of cluster to load', parser.parseNumber)
.option('-t, --timeout <timeout>', 'Set timeout for each request', parser.parseNumber, 60000)
.option('-d, --delay <delay>', 'Set delay for each request', parser.parseNumber, 0)
.option('--debug', 'Enable debug mode')
.parse(process.argv)
.opts()
const config = {
timeout: options.timeout,
delay: options.delay,
debug: options.debug
}
const LOGS_PATH = process.env.LOGS_PATH || 'scripts/logs'
async function main() {
logger.info('Starting...')
logger.info(`Timeout: ${options.timeout}ms`)
logger.info(`Delay: ${options.delay}ms`)
timer.start()
const clusterLog = `${LOGS_PATH}/check-streams/cluster_${options.clusterId}.log`
logger.info(`Loading cluster: ${options.clusterId}`)
logger.info(`Creating '${clusterLog}'...`)
await file.create(clusterLog)
const items = await db.find({ cluster_id: options.clusterId })
const total = items.length
logger.info(`Found ${total} links`)
logger.info('Checking...')
const results = {}
for (const [i, item] of items.entries()) {
const message = `[${i + 1}/${total}] ${item.filepath}: ${item.url}`
const result = await checker.check(item, config)
if (!result.error) {
logger.info(message)
} else {
logger.info(`${message} (${result.error})`)
}
await file.append(clusterLog, JSON.stringify(result) + '\n')
}
logger.info(`Done in ${timer.format('HH[h] mm[m] ss[s]')}`)
}
main()

View File

@ -1,24 +0,0 @@
const { db, logger } = require('../core')
async function main() {
logger.info(`Loading database...`)
let streams = await db.find({})
logger.info(`Removing broken links...`)
let removed = 0
const buffer = []
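// Walk the streams in order: keep the first entry seen for each channel id and
// remove later duplicates only if their last check ended in 'offline' or 'timeout'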
for (const stream of streams) {
const duplicate = buffer.find(i => i.id === stream.id)
if (duplicate && ['offline', 'timeout'].includes(stream.status.code)) {
await db.remove({ _id: stream._id })
removed++
} else {
buffer.push(stream)
}
}
db.compact()
logger.info(`Removed ${removed} links`)
}
main()

View File

@ -0,0 +1,65 @@
const { db, logger, timer, checker, store, file, parser } = require('../../core')
const { program } = require('commander')
const options = program
.requiredOption('-c, --cluster-id <cluster-id>', 'The ID of cluster to load', parser.parseNumber)
.option('-t, --timeout <timeout>', 'Set timeout for each request', parser.parseNumber, 60000)
.option('-d, --delay <delay>', 'Set delay for each request', parser.parseNumber, 0)
.option('--debug', 'Enable debug mode')
.parse(process.argv)
.opts()
const config = {
timeout: options.timeout,
delay: options.delay,
debug: options.debug
}
const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs/cluster/load'
async function main() {
logger.info('starting...')
logger.info(`timeout: ${options.timeout}ms`)
logger.info(`delay: ${options.delay}ms`)
timer.start()
const clusterLog = `${LOGS_DIR}/cluster_${options.clusterId}.log`
logger.info(`loading cluster: ${options.clusterId}`)
logger.info(`creating '${clusterLog}'...`)
await file.create(clusterLog)
await db.streams.load()
const items = await db.streams.find({ cluster_id: options.clusterId })
const total = items.length
logger.info(`found ${total} links`)
logger.info('checking...')
const results = {}
for (const [i, item] of items.entries()) {
const message = `[${i + 1}/${total}] ${item.filepath}: ${item.url}`
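// Reduce the database record to the fields the checker needs,
// forwarding any custom referrer/user-agent stored with the stream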
const request = {
_id: item._id,
url: item.url,
http: {
referrer: item.http_referrer,
'user-agent': item.user_agent
}
}
const result = await checker.check(request, config)
if (!result.error) {
logger.info(message)
} else {
logger.info(`${message} (${result.error})`)
}
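// Log only what the update step needs later: the stream id, the error (if any),
// the probed media streams and the request chain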
const output = {
_id: result._id,
error: result.error,
streams: result.streams,
requests: result.requests
}
await file.append(clusterLog, JSON.stringify(output) + '\n')
}
logger.info(`done in ${timer.format('HH[h] mm[m] ss[s]')}`)
}
main()

View File

@ -1,104 +0,0 @@
const { db, file, parser, store, logger } = require('../core')
const transliteration = require('transliteration')
const { program } = require('commander')
const _ = require('lodash')
const options = program
.option(
'--max-clusters <max-clusters>',
'Set maximum number of clusters',
parser.parseNumber,
200
)
.option('--input-dir <input-dir>', 'Set path to input directory', 'channels')
.parse(process.argv)
.opts()
const links = []
async function main() {
logger.info('Starting...')
logger.info(`Number of clusters: ${options.maxClusters}`)
await loadChannels()
await saveToDatabase()
logger.info('Done')
}
main()
async function loadChannels() {
logger.info(`Loading links...`)
const files = await file.list(`${options.inputDir}/**/*.m3u`)
for (const filepath of files) {
const items = await parser.parsePlaylist(filepath)
for (const item of items) {
item.filepath = filepath
links.push(item)
}
}
logger.info(`Found ${links.length} links`)
}
async function saveToDatabase() {
logger.info('Saving to the database...')
await db.reset()
const chunks = split(_.shuffle(links), options.maxClusters)
for (const [i, chunk] of chunks.entries()) {
for (const item of chunk) {
const stream = store.create()
stream.set('name', { title: item.name })
stream.set('id', { id: item.tvg.id })
stream.set('filepath', { filepath: item.filepath })
stream.set('src_country', { filepath: item.filepath })
stream.set('tvg_country', { tvg_country: item.tvg.country })
stream.set('countries', { tvg_country: item.tvg.country })
stream.set('regions', { countries: stream.get('countries') })
stream.set('languages', { tvg_language: item.tvg.language })
stream.set('categories', { group_title: item.group.title })
stream.set('tvg_url', { tvg_url: item.tvg.url })
stream.set('guides', { tvg_url: item.tvg.url })
stream.set('logo', { logo: item.tvg.logo })
stream.set('resolution', { title: item.name })
stream.set('status', { title: item.name })
stream.set('url', { url: item.url })
stream.set('http', { http: item.http })
stream.set('is_nsfw', { categories: stream.get('categories') })
stream.set('is_broken', { status: stream.get('status') })
stream.set('updated', { updated: false })
stream.set('cluster_id', { cluster_id: i + 1 })
if (!stream.get('id')) {
const id = generateChannelId(stream.get('name'), stream.get('src_country'))
stream.set('id', { id })
}
await db.insert(stream.data())
}
}
}
function split(arr, n) {
let result = []
for (let i = n; i > 0; i--) {
result.push(arr.splice(0, Math.ceil(arr.length / i)))
}
return result
}
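// Build a fallback channel id from the stream name and source country when tvg-id is missing,
// e.g. 'Fox Life +2' from channels/ru.m3u becomes 'FoxLifePlus2.ru'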
function generateChannelId(name, src_country) {
if (name && src_country) {
const slug = transliteration
.transliterate(name)
.replace(/\+/gi, 'Plus')
.replace(/[^a-z\d]+/gi, '')
const code = src_country.code.toLowerCase()
return `${slug}.${code}`
}
return null
}

View File

@ -0,0 +1,78 @@
const { db, file, parser, store, logger, id, api } = require('../../core')
const { program } = require('commander')
const _ = require('lodash')
const options = program
.option(
'--max-clusters <max-clusters>',
'Set maximum number of clusters',
parser.parseNumber,
256
)
.option('--input-dir <input-dir>', 'Set path to input directory', 'streams')
.parse(process.argv)
.opts()
async function main() {
logger.info('starting...')
logger.info(`number of clusters: ${options.maxClusters}`)
await saveToDatabase(await findStreams())
logger.info('done')
}
main()
async function findStreams() {
logger.info(`looking for streams...`)
await api.channels.load()
await db.streams.load()
const streams = []
const files = await file.list(`${options.inputDir}/**/*.m3u`)
for (const filepath of files) {
const items = await parser.parsePlaylist(filepath)
for (const item of items) {
item.filepath = filepath
const stream = store.create()
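// Match the playlist entry to a channel from the iptv-org API by its tvg-id;
// streams without a match keep channel = null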
const channel = await api.channels.find({ id: item.tvg.id })
stream.set('channel', { channel: channel ? channel.id : null })
stream.set('title', { title: item.name })
stream.set('filepath', { filepath: item.filepath })
stream.set('url', { url: item.url })
stream.set('http_referrer', { http_referrer: item.http.referrer })
stream.set('user_agent', { user_agent: item.http['user-agent'] })
streams.push(stream)
}
}
logger.info(`found ${streams.length} streams`)
return streams
}
async function saveToDatabase(streams = []) {
logger.info('saving to the database...')
await db.streams.reset()
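// Shuffle the streams and spread them over at most --max-clusters chunks
// so every check job gets a similarly sized batch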
const chunks = split(_.shuffle(streams), options.maxClusters)
for (const [i, chunk] of chunks.entries()) {
for (const stream of chunk) {
stream.set('cluster_id', { cluster_id: i + 1 })
await db.streams.insert(stream.data())
}
}
}
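// Split arr into n chunks of nearly equal size
// (e.g. 10 streams across 3 clusters gives chunks of 4, 3 and 3)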
function split(arr, n) {
let result = []
for (let i = n; i > 0; i--) {
result.push(arr.splice(0, Math.ceil(arr.length / i)))
}
return result
}

View File

@ -0,0 +1,26 @@
const { logger, db, file } = require('../../core')
const _ = require('lodash')
const PUBLIC_DIR = process.env.PUBLIC_DIR || '.api'
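// Exports the checked streams to .api/streams.json, the folder the
// auto-update workflow publishes to the iptv-org/api repository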
async function main() {
await db.streams.load()
let streams = await db.streams.find({})
streams = _.sortBy(streams, 'channel')
streams = streams.map(stream => {
return {
channel: stream.channel,
url: stream.url,
http_referrer: stream.http_referrer,
user_agent: stream.user_agent,
status: stream.status,
width: stream.width,
height: stream.height,
bitrate: stream.bitrate
}
})
await file.create(`${PUBLIC_DIR}/streams.json`, JSON.stringify(streams))
}
main()

View File

@ -1,7 +1,8 @@
const { logger, db } = require('../core')
const { logger, db } = require('../../core')
async function main() {
const docs = await db.find({}).sort({ cluster_id: 1 })
await db.streams.load()
const docs = await db.streams.find({}).sort({ cluster_id: 1 })
const cluster_id = docs.reduce((acc, curr) => {
if (!acc.includes(curr.cluster_id)) acc.push(curr.cluster_id)
return acc

View File

@ -0,0 +1,153 @@
const { db, store, parser, file, logger } = require('../../core')
const _ = require('lodash')
const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs/cluster/load'
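// Reads the cluster logs written by cluster:load and writes the check results
// (status, resolution, bitrate, resolved URL) back to the streams database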
async function main() {
const streams = await loadStreams()
const results = await loadResults()
const origins = await loadOrigins(results)
await updateStreams(streams, results, origins)
}
main()
async function updateStreams(items = [], results = {}, origins = {}) {
logger.info('updating streams...')
let buffer = {}
let updated = 0
let removed = 0
for (const item of items) {
const stream = store.create(item)
const result = results[item._id]
if (result) {
const status = parseStatus(result.error)
stream.set('status', { status })
if (result.streams.length) {
const { width, height, bitrate } = parseMediaInfo(result.streams)
stream.set('width', { width })
stream.set('height', { height })
stream.set('bitrate', { bitrate })
}
if (result.requests.length) {
const origin = findOrigin(result.requests, origins)
if (origin) {
stream.set('url', { url: origin })
}
}
}
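// If another stream already resolved to the same URL, remove this one as a duplicate;
// otherwise save any changed fields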
if (buffer[stream.get('url')]) {
await db.streams.remove({ _id: stream.get('_id') })
removed++
} else if (stream.changed) {
await db.streams.update({ _id: stream.get('_id') }, stream.data())
buffer[stream.get('url')] = true
updated++
}
}
db.streams.compact()
logger.info(`updated ${updated} streams`)
logger.info(`removed ${removed} duplicates`)
logger.info('done')
}
async function loadStreams() {
logger.info('loading streams...')
await db.streams.load()
const streams = await db.streams.find({})
logger.info(`found ${streams.length} streams`)
return streams
}
async function loadResults() {
logger.info('loading check results...')
const results = {}
const files = await file.list(`${LOGS_DIR}/cluster_*.log`)
for (const filepath of files) {
const parsed = await parser.parseLogs(filepath)
for (const item of parsed) {
results[item._id] = item
}
}
logger.info(`found ${Object.values(results).length} results`)
return results
}
async function loadOrigins(results = {}) {
logger.info('loading origins...')
const origins = {}
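// Map every URL reached through redirects back to the first URL requested on the
// same host, so streams can later be rewritten to their origin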
for (const { error, requests } of Object.values(results)) {
if (error || !Array.isArray(requests) || !requests.length) continue
let origin = requests.shift()
origin = new URL(origin.url)
for (const request of requests) {
const curr = new URL(request.url)
const key = curr.href.replace(/(^\w+:|^)/, '')
if (!origins[key] && curr.host === origin.host) {
origins[key] = origin.href
}
}
}
logger.info(`found ${_.uniq(Object.values(origins)).length} origins`)
return origins
}
function findOrigin(requests = [], origins = {}) {
if (origins && Array.isArray(requests)) {
requests = requests.map(r => r.url.replace(/(^\w+:|^)/, ''))
for (const url of requests) {
if (origins[url]) {
return origins[url]
}
}
}
return null
}
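// Pick the highest-resolution video stream reported by the checker
// and return its width, height and variant bitrate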
function parseMediaInfo(streams) {
streams = streams.filter(s => s.codec_type === 'video')
streams = _.orderBy(
streams,
['height', s => (s.tags && s.tags.variant_bitrate ? parseInt(s.tags.variant_bitrate) : 0)],
['desc', 'desc']
)
const data = _.head(streams)
if (data) {
const bitrate = data.tags && data.tags.variant_bitrate ? parseInt(data.tags.variant_bitrate) : 0
return { width: data.width, height: data.height, bitrate }
}
return {}
}
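// Translate the checker's error message into a coarse status:
// online, timeout, blocked (HTTP 403) or a generic error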
function parseStatus(error) {
if (!error) return 'online'
switch (error) {
case 'Operation timed out':
return 'timeout'
case 'Server returned 403 Forbidden (access denied)':
return 'blocked'
default:
return 'error'
}
}

View File

@ -1,441 +0,0 @@
const { db, logger, generator, file } = require('../core')
const _ = require('lodash')
let languages = []
let countries = []
let categories = []
let regions = []
const LOGS_PATH = process.env.LOGS_PATH || 'scripts/logs'
const PUBLIC_PATH = process.env.PUBLIC_PATH || '.gh-pages'
async function main() {
await setUp()
await generateCategories()
await generateCountries()
await generateLanguages()
await generateRegions()
await generateIndex()
await generateIndexNSFW()
await generateIndexCategory()
await generateIndexCountry()
await generateIndexLanguage()
await generateIndexRegion()
await generateChannelsJson()
}
main()
async function generateCategories() {
logger.info(`Generating categories/...`)
for (const category of categories) {
const { count } = await generator.generate(
`${PUBLIC_PATH}/categories/${category.slug}.m3u`,
{ categories: { $elemMatch: category } },
{ saveEmpty: true, includeNSFW: true }
)
await log('categories', {
name: category.name,
slug: category.slug,
count
})
}
const { count: otherCount } = await generator.generate(
`${PUBLIC_PATH}/categories/other.m3u`,
{ categories: { $size: 0 } },
{
saveEmpty: true,
onLoad: function (items) {
return items.map(item => {
item.group_title = 'Other'
return item
})
}
}
)
await log('categories', {
name: 'Other',
slug: 'other',
count: otherCount
})
}
async function generateCountries() {
logger.info(`Generating countries/...`)
for (const country of countries) {
const { count } = await generator.generate(
`${PUBLIC_PATH}/countries/${country.code.toLowerCase()}.m3u`,
{
countries: { $elemMatch: country }
}
)
await log('countries', {
name: country.name,
code: country.code,
count
})
}
const { count: undefinedCount } = await generator.generate(
`${PUBLIC_PATH}/countries/undefined.m3u`,
{
countries: { $size: 0 }
},
{
onLoad: function (items) {
return items.map(item => {
item.group_title = 'Undefined'
return item
})
}
}
)
await log('countries', {
name: 'Undefined',
code: 'UNDEFINED',
count: undefinedCount
})
}
async function generateLanguages() {
logger.info(`Generating languages/...`)
for (const language of _.uniqBy(languages, 'code')) {
const { count } = await generator.generate(`${PUBLIC_PATH}/languages/${language.code}.m3u`, {
languages: { $elemMatch: language }
})
await log('languages', {
name: language.name,
code: language.code,
count
})
}
const { count: undefinedCount } = await generator.generate(
`${PUBLIC_PATH}/languages/undefined.m3u`,
{
languages: { $size: 0 }
},
{
onLoad: function (items) {
return items.map(item => {
item.group_title = 'Undefined'
return item
})
}
}
)
await log('languages', {
name: 'Undefined',
code: 'undefined',
count: undefinedCount
})
}
async function generateRegions() {
logger.info(`Generating regions/...`)
for (const region of regions) {
const { count } = await generator.generate(
`${PUBLIC_PATH}/regions/${region.code.toLowerCase()}.m3u`,
{
regions: { $elemMatch: region }
}
)
await log('regions', {
name: region.name,
code: region.code,
count
})
}
const { count: undefinedCount } = await generator.generate(
`${PUBLIC_PATH}/regions/undefined.m3u`,
{ regions: { $size: 0 } },
{
saveEmpty: true,
onLoad: function (items) {
return items.map(item => {
item.group_title = 'Undefined'
return item
})
}
}
)
await log('regions', {
name: 'Undefined',
code: 'UNDEFINED',
count: undefinedCount
})
}
async function generateIndexNSFW() {
logger.info(`Generating index.nsfw.m3u...`)
await generator.generate(
`${PUBLIC_PATH}/index.nsfw.m3u`,
{},
{
includeNSFW: true,
onLoad: function (items) {
return items.map(item => {
if (!item.categories || !item.categories.length) {
item.group_title = 'Other'
}
return item
})
},
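// Sort groups alphabetically; mapping 'Other' to '_' pushes it below all the capitalized group names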
sortBy: item => {
if (item.group_title === 'Other') return '_'
return item.group_title || ''
}
}
)
}
async function generateIndex() {
logger.info(`Generating index.m3u...`)
await generator.generate(
`${PUBLIC_PATH}/index.m3u`,
{},
{
onLoad: function (items) {
return items.map(item => {
if (!item.categories || !item.categories.length) {
item.group_title = 'Other'
}
return item
})
},
sortBy: item => {
if (item.group_title === 'Other') return '_'
return item.group_title || ''
}
}
)
}
async function generateIndexCategory() {
logger.info(`Generating index.category.m3u...`)
await generator.generate(
`${PUBLIC_PATH}/index.category.m3u`,
{},
{
onLoad: function (items) {
let results = items
.filter(item => !item.categories || !item.categories.length)
.map(item => {
const newItem = _.cloneDeep(item)
newItem.group_title = 'Other'
return newItem
})
for (const category of _.sortBy(Object.values(categories), ['name'])) {
let filtered = items
.filter(item => {
return (
Array.isArray(item.categories) &&
item.categories.map(c => c.slug).includes(category.slug)
)
})
.map(item => {
const newItem = _.cloneDeep(item)
newItem.group_title = category.name
return newItem
})
results = results.concat(filtered)
}
return results
},
sortBy: item => {
if (item.group_title === 'Other') return '_'
return item.group_title
}
}
)
}
async function generateIndexCountry() {
logger.info(`Generating index.country.m3u...`)
await generator.generate(
`${PUBLIC_PATH}/index.country.m3u`,
{},
{
onLoad: function (items) {
let results = items
.filter(item => !item.countries || !item.countries.length)
.map(item => {
const newItem = _.cloneDeep(item)
newItem.group_title = 'Undefined'
newItem.categories = []
return newItem
})
for (const country of _.sortBy(Object.values(countries), ['name'])) {
let filtered = items
.filter(item => {
return (
Array.isArray(item.countries) &&
item.countries.map(c => c.code).includes(country.code)
)
})
.map(item => {
const newItem = _.cloneDeep(item)
newItem.group_title = country.name
return newItem
})
results = results.concat(filtered)
}
return results
},
sortBy: item => {
if (item.group_title === 'Undefined') return '_'
return item.group_title
}
}
)
}
async function generateIndexLanguage() {
logger.info(`Generating index.language.m3u...`)
await generator.generate(
`${PUBLIC_PATH}/index.language.m3u`,
{},
{
onLoad: function (items) {
let results = items
.filter(item => !item.languages || !item.languages.length)
.map(item => {
const newItem = _.cloneDeep(item)
newItem.group_title = 'Undefined'
newItem.categories = []
return newItem
})
for (const language of languages) {
let filtered = items
.filter(item => {
return (
Array.isArray(item.languages) &&
item.languages.map(c => c.code).includes(language.code)
)
})
.map(item => {
const newItem = _.cloneDeep(item)
newItem.group_title = language.name
return newItem
})
results = results.concat(filtered)
}
return results
},
sortBy: item => {
if (item.group_title === 'Undefined') return '_'
return item.group_title
}
}
)
}
async function generateIndexRegion() {
logger.info(`Generating index.region.m3u...`)
await generator.generate(
`${PUBLIC_PATH}/index.region.m3u`,
{},
{
onLoad: function (items) {
let results = items
.filter(item => !item.regions.length)
.map(item => {
const newItem = _.cloneDeep(item)
newItem.group_title = 'Undefined'
newItem.categories = []
return newItem
})
for (const region of regions) {
let filtered = items
.filter(item => {
return item.regions.map(c => c.code).includes(region.code)
})
.map(item => {
const newItem = _.cloneDeep(item)
newItem.group_title = region.name
return newItem
})
results = results.concat(filtered)
}
return results
},
sortBy: item => {
if (item.group_title === 'Undefined') return '_'
return item.group_title
}
}
)
}
async function generateChannelsJson() {
logger.info('Generating channels.json...')
await generator.generate(
`${PUBLIC_PATH}/channels.json`,
{},
{ format: 'json', includeNSFW: true, uniqBy: null }
)
}
async function setUp() {
logger.info(`Loading database...`)
const items = await db.find({})
categories = _.sortBy(_.uniqBy(_.flatten(items.map(i => i.categories)), 'slug'), ['name']).filter(
i => i
)
countries = _.sortBy(_.uniqBy(_.flatten(items.map(i => i.countries)), 'code'), ['name']).filter(
i => i
)
languages = _.sortBy(_.uniqBy(_.flatten(items.map(i => i.languages)), 'code'), ['name']).filter(
i => i
)
regions = _.sortBy(_.uniqBy(_.flatten(items.map(i => i.regions)), 'code'), ['name']).filter(
i => i
)
const categoriesLog = `${LOGS_PATH}/generate-playlists/categories.log`
const countriesLog = `${LOGS_PATH}/generate-playlists/countries.log`
const languagesLog = `${LOGS_PATH}/generate-playlists/languages.log`
const regionsLog = `${LOGS_PATH}/generate-playlists/regions.log`
logger.info(`Creating '${categoriesLog}'...`)
await file.create(categoriesLog)
logger.info(`Creating '${countriesLog}'...`)
await file.create(countriesLog)
logger.info(`Creating '${languagesLog}'...`)
await file.create(languagesLog)
logger.info(`Creating '${regionsLog}'...`)
await file.create(regionsLog)
}
async function log(type, data) {
await file.append(`${LOGS_PATH}/generate-playlists/${type}.log`, JSON.stringify(data) + '\n')
}

View File

@ -0,0 +1,74 @@
const { db, generator, api, logger, file } = require('../../core')
const { orderBy } = require('natural-orderby')
const _ = require('lodash')
async function main() {
const streams = await loadStreams()
logger.info('generating categories/...')
await generator.generate('categories', streams)
logger.info('generating countries/...')
await generator.generate('countries', streams)
logger.info('generating languages/...')
await generator.generate('languages', streams)
logger.info('generating regions/...')
await generator.generate('regions', streams)
logger.info('generating index.category.m3u...')
await generator.generate('index_category_m3u', streams)
logger.info('generating index.country.m3u...')
await generator.generate('index_country_m3u', streams)
logger.info('generating index.language.m3u...')
await generator.generate('index_language_m3u', streams)
logger.info('generating index.m3u...')
await generator.generate('index_m3u', streams)
logger.info('generating index.nsfw.m3u...')
await generator.generate('index_nsfw_m3u', streams)
logger.info('generating index.region.m3u...')
await generator.generate('index_region_m3u', streams)
}
main()
async function loadStreams() {
await db.streams.load()
let streams = await db.streams.find({})
streams = _.filter(streams, stream => stream.status !== 'error')
streams = orderBy(streams, ['channel', 'height', 'url'], ['asc', 'desc', 'asc'])
streams = _.uniqBy(streams, stream => stream.channel || _.uniqueId())
await api.channels.load()
let channels = await api.channels.all()
channels = _.keyBy(channels, 'id')
await api.categories.load()
let categories = await api.categories.all()
categories = _.keyBy(categories, 'id')
await api.languages.load()
let languages = await api.languages.all()
languages = _.keyBy(languages, 'code')
await api.guides.load()
let guides = await api.guides.all()
guides = _.groupBy(guides, 'channel')
streams = streams.map(stream => {
const channel = channels[stream.channel] || null
const filename = file.getFilename(stream.filepath)
    const [, code] = filename.match(/^([a-z]{2})(_|$)/) || [null, null]
const defaultBroadcastArea = code ? [`c/${code.toUpperCase()}`] : []
stream.guides = channel && Array.isArray(guides[channel.id]) ? guides[channel.id] : []
stream.categories = channel ? channel.categories.map(id => categories[id]) : []
stream.languages = channel ? channel.languages.map(id => languages[id]) : []
stream.broadcast_area = channel ? channel.broadcast_area : defaultBroadcastArea
stream.is_nsfw = channel ? channel.is_nsfw : false
stream.logo = channel ? channel.logo : null
return stream
})
streams = orderBy(streams, ['title'], ['asc'])
return streams
}
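After loadStreams() each database record is enriched with data from the API dumps. A hypothetical entry (all field values below are illustrative, not taken from the actual database) looks roughly like this:
{
  channel: 'FoxLife.ru',
  filepath: 'channels/ru.m3u',
  title: 'Fox Life',
  url: 'https://example.com/stream.m3u8',
  height: 720,
  guides: [{ channel: 'FoxLife.ru', url: 'https://example.com/guide.xml' }],
  categories: [{ id: 'general', name: 'General' }],
  languages: [{ code: 'rus', name: 'Russian' }],
  broadcast_area: ['c/RU'],
  is_nsfw: false,
  logo: 'https://example.com/logo.png'
}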

View File

@ -0,0 +1,18 @@
const { create: createPlaylist } = require('../../core/playlist')
const { db, logger, file } = require('../../core')
const { orderBy } = require('natural-orderby')
const _ = require('lodash')
async function main() {
await db.streams.load()
let streams = await db.streams.find({})
streams = orderBy(streams, ['title', 'height', 'url'], ['asc', 'desc', 'asc'])
const files = _.groupBy(streams, 'filepath')
for (const filepath in files) {
const playlist = createPlaylist(files[filepath], { public: false })
await file.create(filepath, playlist.toString())
}
}
main()

View File

@ -0,0 +1,87 @@
const { file, logger, api, parser, id } = require('../../core')
const { program } = require('commander')
const chalk = require('chalk')
const _ = require('lodash')
program.argument('[filepath]', 'Path to file to validate').parse(process.argv)
async function main() {
const files = program.args.length ? program.args : await file.list('channels/*.m3u')
logger.info(`loading blocklist...`)
await api.channels.load()
await api.blocklist.load()
let blocklist = await api.blocklist.all()
blocklist = blocklist
.map(blocked => {
const channel = api.channels.find({ id: blocked.channel })
if (!channel) return null
return { ...blocked, name: channel.name }
})
.filter(i => i)
logger.info(`found ${blocklist.length} records`)
let errors = []
let warnings = []
for (const filepath of files) {
if (!filepath.endsWith('.m3u')) continue
const basename = file.basename(filepath)
const [__, country] = basename.match(/([a-z]{2})(|_.*)\.m3u/i) || [null, null]
const fileLog = []
const items = await parser.parsePlaylist(filepath)
for (const item of items) {
if (item.tvg.id && !api.channels.find({ id: item.tvg.id })) {
fileLog.push({
type: 'warning',
line: item.line,
message: `"${item.tvg.id}" is not in the database`
})
}
const channel_id = id.generate(item.name, country)
const found = blocklist.find(
blocked =>
          item.tvg.id.toLowerCase() === blocked.channel.toLowerCase() ||
          (channel_id && channel_id.toLowerCase() === blocked.channel.toLowerCase())
)
if (found) {
fileLog.push({
type: 'error',
line: item.line,
message: `"${found.name}" is on the blocklist due to claims of copyright holders (${found.ref})`
})
}
}
if (fileLog.length) {
logger.info(`\n${chalk.underline(filepath)}`)
fileLog.forEach(err => {
const position = err.line.toString().padEnd(6, ' ')
const type = err.type.padEnd(9, ' ')
const status = err.type === 'error' ? chalk.red(type) : chalk.yellow(type)
logger.info(` ${chalk.gray(position)}${status}${err.message}`)
})
errors = errors.concat(fileLog.filter(e => e.type === 'error'))
warnings = warnings.concat(fileLog.filter(e => e.type === 'warning'))
}
}
logger.error(
chalk.red(
`\n${errors.length + warnings.length} problems (${errors.length} errors, ${
warnings.length
} warnings)`
)
)
if (errors.length) {
process.exit(1)
}
}
main()

View File

@ -0,0 +1,123 @@
const { file, markdown, parser, logger, api } = require('../../core')
const { create: createTable } = require('../../core/table')
const { program } = require('commander')
const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs/generators'
const options = program
.option('-c, --config <config>', 'Set path to config file', '.readme/config.json')
.parse(process.argv)
.opts()
async function main() {
await createCategoryTable()
await createCountryTable()
await createLanguageTable()
await createRegionTable()
await updateReadme()
}
main()
async function createCategoryTable() {
logger.info('creating category table...')
const rows = []
await api.categories.load()
const items = await parser.parseLogs(`${LOGS_DIR}/categories.log`)
for (const item of items) {
const id = file.getFilename(item.filepath)
const category = await api.categories.find({ id })
rows.push({
name: category ? category.name : 'Undefined',
channels: item.count,
playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
})
}
const table = createTable(rows, [
{ name: 'Category' },
{ name: 'Channels', align: 'right' },
{ name: 'Playlist', nowrap: true }
])
await file.create('./.readme/_categories.md', table)
}
async function createCountryTable() {
logger.info('creating country table...')
const rows = []
await api.countries.load()
const items = await parser.parseLogs(`${LOGS_DIR}/countries.log`)
for (const item of items) {
const code = file.getFilename(item.filepath)
const country = await api.countries.find({ code: code.toUpperCase() })
rows.push({
name: country ? `${country.flag} ${country.name}` : 'Undefined',
channels: item.count,
playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
})
}
const table = createTable(rows, [
{ name: 'Country' },
{ name: 'Channels', align: 'right' },
{ name: 'Playlist', nowrap: true }
])
await file.create('./.readme/_countries.md', table)
}
async function createLanguageTable() {
logger.info('creating language table...')
const rows = []
await api.languages.load()
const items = await parser.parseLogs(`${LOGS_DIR}/languages.log`)
for (const item of items) {
const code = file.getFilename(item.filepath)
const language = await api.languages.find({ code })
rows.push({
name: language ? language.name : 'Undefined',
channels: item.count,
playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
})
}
const table = createTable(rows, [
{ name: 'Language', align: 'left' },
{ name: 'Channels', align: 'right' },
{ name: 'Playlist', align: 'left', nowrap: true }
])
await file.create('./.readme/_languages.md', table)
}
async function createRegionTable() {
logger.info('creating region table...')
const rows = []
await api.regions.load()
const items = await parser.parseLogs(`${LOGS_DIR}/regions.log`)
for (const item of items) {
const code = file.getFilename(item.filepath)
const region = await api.regions.find({ code: code.toUpperCase() })
rows.push({
name: region ? region.name : 'Undefined',
channels: item.count,
playlist: `<code>https://iptv-org.github.io/iptv/${item.filepath}</code>`
})
}
const table = createTable(rows, [
{ name: 'Region', align: 'left' },
{ name: 'Channels', align: 'right' },
{ name: 'Playlist', align: 'left', nowrap: true }
])
await file.create('./.readme/_regions.md', table)
}
async function updateReadme() {
logger.info('updating readme.md...')
const config = require(file.resolve(options.config))
await file.createDir(file.dirname(config.build))
await markdown.compile(options.config)
}

View File

@ -1,243 +0,0 @@
const _ = require('lodash')
const statuses = require('../data/statuses')
const languages = require('../data/languages')
const { db, store, parser, file, logger } = require('../core')
let epgCodes = []
let streams = []
let checkResults = {}
const origins = {}
const items = []
const LOGS_PATH = process.env.LOGS_PATH || 'scripts/logs'
const EPG_CODES_FILEPATH = process.env.EPG_CODES_FILEPATH || 'scripts/data/codes.json'
async function main() {
await setUp()
await loadDatabase()
await removeDuplicates()
await loadCheckResults()
await findStreamOrigins()
await updateStreams()
await updateDatabase()
}
main()
async function loadDatabase() {
logger.info('Loading database...')
streams = await db.find({})
logger.info(`Found ${streams.length} streams`)
}
async function removeDuplicates() {
logger.info('Removing duplicates...')
const before = streams.length
streams = _.uniqBy(streams, 'id')
const after = streams.length
logger.info(`Removed ${before - after} links`)
}
async function loadCheckResults() {
logger.info('Loading check results from logs/...')
const files = await file.list(`${LOGS_PATH}/check-streams/cluster_*.log`)
for (const filepath of files) {
const results = await parser.parseLogs(filepath)
for (const result of results) {
checkResults[result._id] = result
}
}
logger.info(`Found ${Object.values(checkResults).length} results`)
}
async function findStreamOrigins() {
logger.info('Searching for stream origins...')
for (const { error, requests } of Object.values(checkResults)) {
if (error || !Array.isArray(requests) || !requests.length) continue
let origin = requests.shift()
origin = new URL(origin.url)
for (const request of requests) {
const curr = new URL(request.url)
const key = curr.href.replace(/(^\w+:|^)/, '')
if (!origins[key] && curr.host === origin.host) {
origins[key] = origin.href
}
}
}
logger.info(`Found ${_.uniq(Object.values(origins)).length} origins`)
}
async function updateStreams() {
logger.info('Updating streams...')
let updated = 0
for (const item of streams) {
const stream = store.create(item)
const result = checkResults[item._id]
if (result) {
const { error, streams, requests } = result
const resolution = parseResolution(streams)
const origin = findOrigin(requests)
let status = parseStatus(error)
if (status) {
const prevStatus = item.status
if (prevStatus.code === 'not_247') // not_247 -> * = not_247
status = item.status
else if (prevStatus.code === 'geo_blocked') // geo_blocked -> * = geo_blocked
status = item.status
else if (status.code === 'geo_blocked') // * -> geo_blocked = *
status = item.status
else if (prevStatus.code === 'offline' && status.code === 'online') // offline -> online = not_247
status = statuses['not_247']
stream.set('status', { status })
stream.set('is_broken', { status: stream.get('status') })
}
if (resolution) {
stream.set('resolution', { resolution })
}
if (origin) {
stream.set('url', { url: origin })
}
}
if (!stream.has('logo')) {
const logo = findLogo(stream.get('id'))
stream.set('logo', { logo })
}
if (!stream.has('guides')) {
const guides = findGuides(stream.get('id'))
stream.set('guides', { guides })
}
if (!stream.has('countries') && stream.get('src_country')) {
const countries = [stream.get('src_country')]
stream.set('countries', { countries })
}
if (!stream.has('languages')) {
const languages = findLanguages(stream.get('countries'), stream.get('src_country'))
stream.set('languages', { languages })
}
if (stream.changed) {
stream.set('updated', true)
items.push(stream.data())
updated++
}
}
logger.info(`Updated ${updated} items`)
}
async function updateDatabase() {
logger.info('Updating database...')
for (const item of items) {
await db.update({ _id: item._id }, item)
}
db.compact()
logger.info('Done')
}
async function setUp() {
try {
const codes = await file.read(EPG_CODES_FILEPATH)
epgCodes = JSON.parse(codes)
} catch (err) {
logger.error(err.message)
}
}
function findLanguages(countries, src_country) {
if (countries && Array.isArray(countries)) {
let codes = countries.map(country => country.lang)
codes = _.uniq(codes)
return codes.map(code => languages.find(l => l.code === code)).filter(l => l)
}
if (src_country) {
const code = src_country.lang
const lang = languages.find(l => l.code === code)
return lang ? [lang] : []
}
return []
}
function findOrigin(requests) {
if (origins && Array.isArray(requests)) {
requests = requests.map(r => r.url.replace(/(^\w+:|^)/, ''))
for (const url of requests) {
if (origins[url]) {
return origins[url]
}
}
}
return null
}
function parseResolution(streams) {
const resolution = streams
.filter(s => s.codec_type === 'video')
.reduce(
(acc, curr) => {
if (curr.height > acc.height) return { width: curr.width, height: curr.height }
return acc
},
{ width: 0, height: 0 }
)
if (resolution.width > 0 && resolution.height > 0) return resolution
return null
}
function parseStatus(error) {
if (error) {
if (error.includes('timed out')) {
return statuses['timeout']
} else if (error.includes('403')) {
return statuses['geo_blocked']
}
return statuses['offline']
}
return statuses['online']
}
function findLogo(id) {
const item = epgCodes.find(i => i.tvg_id === id)
if (item && item.logo) {
return item.logo
}
return null
}
function findGuides(id) {
const item = epgCodes.find(i => i.tvg_id === id)
if (item && Array.isArray(item.guides)) {
return item.guides
}
return []
}

View File

@ -1,16 +0,0 @@
const _ = require('lodash')
const { generator, db, logger } = require('../core')
async function main() {
let items = await db
.find({})
.sort({ name: 1, 'status.level': 1, 'resolution.height': -1, url: 1 })
const files = _.groupBy(items, 'filepath')
for (const filepath in files) {
const items = files[filepath]
await generator.saveAsM3U(filepath, items, { includeGuides: false })
}
}
main()

View File

@ -1,140 +0,0 @@
const { file, markdown, parser, logger } = require('../core')
const { program } = require('commander')
let categories = []
let countries = []
let languages = []
let regions = []
const LOGS_PATH = process.env.LOGS_PATH || 'scripts/logs'
const options = program
.option('-c, --config <config>', 'Set path to config file', '.readme/config.json')
.parse(process.argv)
.opts()
async function main() {
await setUp()
await generateCategoryTable()
await generateLanguageTable()
await generateRegionTable()
await generateCountryTable()
await updateReadme()
}
main()
async function generateCategoryTable() {
logger.info('Generating category table...')
const rows = []
for (const category of categories) {
rows.push({
category: category.name,
channels: category.count,
playlist: `<code>https://iptv-org.github.io/iptv/categories/${category.slug}.m3u</code>`
})
}
const table = markdown.createTable(rows, [
{ name: 'Category', align: 'left' },
{ name: 'Channels', align: 'right' },
{ name: 'Playlist', align: 'left', nowrap: true }
])
await file.create('./.readme/_categories.md', table)
}
async function generateCountryTable() {
logger.info('Generating country table...')
const rows = []
for (const country of countries) {
const flag = getCountryFlag(country.code)
const prefix = flag ? `${flag} ` : ''
rows.push({
country: prefix + country.name,
channels: country.count,
playlist: `<code>https://iptv-org.github.io/iptv/countries/${country.code.toLowerCase()}.m3u</code>`
})
}
const table = markdown.createTable(rows, [
{ name: 'Country', align: 'left' },
{ name: 'Channels', align: 'right' },
{ name: 'Playlist', align: 'left', nowrap: true }
])
await file.create('./.readme/_countries.md', table)
}
async function generateRegionTable() {
logger.info('Generating region table...')
const rows = []
for (const region of regions) {
rows.push({
region: region.name,
channels: region.count,
playlist: `<code>https://iptv-org.github.io/iptv/regions/${region.code.toLowerCase()}.m3u</code>`
})
}
const table = markdown.createTable(rows, [
{ name: 'Region', align: 'left' },
{ name: 'Channels', align: 'right' },
{ name: 'Playlist', align: 'left', nowrap: true }
])
await file.create('./.readme/_regions.md', table)
}
async function generateLanguageTable() {
logger.info('Generating language table...')
const rows = []
for (const language of languages) {
rows.push({
language: language.name,
channels: language.count,
playlist: `<code>https://iptv-org.github.io/iptv/languages/${language.code}.m3u</code>`
})
}
const table = markdown.createTable(rows, [
{ name: 'Language', align: 'left' },
{ name: 'Channels', align: 'right' },
{ name: 'Playlist', align: 'left', nowrap: true }
])
await file.create('./.readme/_languages.md', table)
}
async function updateReadme() {
logger.info('Updating README.md...')
const config = require(file.resolve(options.config))
await file.createDir(file.dirname(config.build))
await markdown.compile(options.config)
}
async function setUp() {
categories = await parser.parseLogs(`${LOGS_PATH}/generate-playlists/categories.log`)
countries = await parser.parseLogs(`${LOGS_PATH}/generate-playlists/countries.log`)
languages = await parser.parseLogs(`${LOGS_PATH}/generate-playlists/languages.log`)
regions = await parser.parseLogs(`${LOGS_PATH}/generate-playlists/regions.log`)
}
function getCountryFlag(code) {
switch (code) {
case 'UK':
return '🇬🇧'
case 'UNDEFINED':
return ''
default:
return code.replace(/./g, char => String.fromCodePoint(char.charCodeAt(0) + 127397))
}
}

View File

@ -1,55 +0,0 @@
const blocklist = require('../data/blocklist')
const parser = require('iptv-playlist-parser')
const { file, logger } = require('../core')
const { program } = require('commander')
const options = program
.option('--input-dir <input-dir>', 'Set path to input directory', 'channels')
.parse(process.argv)
.opts()
async function main() {
const files = await file.list(`${options.inputDir}/**/*.m3u`)
const errors = []
for (const filepath of files) {
const content = await file.read(filepath)
const playlist = parser.parse(content)
const basename = file.basename(filepath)
const [_, country] = basename.match(/([a-z]{2})(|_.*)\.m3u/i) || [null, null]
const items = playlist.items
.map(item => {
const details = check(item, country)
return details ? { ...item, details } : null
})
.filter(i => i)
items.forEach(item => {
errors.push(
`${filepath}:${item.line} '${item.details.name}' is on the blocklist due to claims of copyright holders (${item.details.reference})`
)
})
}
errors.forEach(error => {
logger.error(error)
})
if (errors.length) {
logger.info('')
process.exit(1)
}
}
function check(channel, country) {
return blocklist.find(item => {
const regexp = new RegExp(item.regex, 'i')
const hasSameName = regexp.test(channel.name)
const fromSameCountry = country === item.country.toLowerCase()
return hasSameName && fromSameCountry
})
}
main()

scripts/core/api.js Normal file
View File

@ -0,0 +1,39 @@
const _ = require('lodash')
const file = require('./file')
const DATA_DIR = process.env.DATA_DIR || './scripts/data'
class API {
constructor(filepath) {
this.filepath = file.resolve(filepath)
}
async load() {
const data = await file.read(this.filepath)
this.collection = JSON.parse(data)
}
find(query) {
return _.find(this.collection, query)
}
filter(query) {
return _.filter(this.collection, query)
}
all() {
return this.collection
}
}
const api = {}
api.channels = new API(`${DATA_DIR}/channels.json`)
api.countries = new API(`${DATA_DIR}/countries.json`)
api.guides = new API(`${DATA_DIR}/guides.json`)
api.categories = new API(`${DATA_DIR}/categories.json`)
api.languages = new API(`${DATA_DIR}/languages.json`)
api.regions = new API(`${DATA_DIR}/regions.json`)
api.blocklist = new API(`${DATA_DIR}/blocklist.json`)
module.exports = api
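A minimal usage sketch; the shape of the returned record depends on the iptv-org API dump, so the fields shown are an assumption:
async function example() {
  await api.countries.load()
  const country = api.countries.find({ code: 'US' })
  // -> { name: 'United States', code: 'US', ... } (shape assumed)
}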

View File

@ -1,61 +1,75 @@
const Database = require('nedb-promises')
const nedb = require('nedb-promises')
const file = require('./file')
const DB_FILEPATH = process.env.DB_FILEPATH || './scripts/channels.db'
const DB_DIR = process.env.DB_DIR || './scripts/database'
const nedb = Database.create({
filename: file.resolve(DB_FILEPATH),
autoload: true,
onload(err) {
if (err) console.error(err)
},
compareStrings: (a, b) => {
a = a.replace(/\s/g, '_')
b = b.replace(/\s/g, '_')
class Database {
constructor(filepath) {
this.filepath = filepath
}
return a.localeCompare(b, undefined, {
sensitivity: 'accent',
numeric: true
load() {
this.db = nedb.create({
filename: file.resolve(this.filepath),
autoload: true,
onload: err => {
if (err) console.error(err)
},
compareStrings: (a, b) => {
a = a.replace(/\s/g, '_')
b = b.replace(/\s/g, '_')
return a.localeCompare(b, undefined, {
sensitivity: 'accent',
numeric: true
})
}
})
}
})
removeIndex(field) {
return this.db.removeIndex(field)
}
addIndex(options) {
return this.db.ensureIndex(options)
}
compact() {
return this.db.persistence.compactDatafile()
}
stopAutocompact() {
return this.db.persistence.stopAutocompaction()
}
reset() {
return file.clear(this.filepath)
}
count(query) {
return this.db.count(query)
}
insert(doc) {
return this.db.insert(doc)
}
update(query, update) {
return this.db.update(query, update)
}
find(query) {
return this.db.find(query)
}
remove(query, options) {
return this.db.remove(query, options)
}
}
const db = {}
db.removeIndex = function (field) {
return nedb.removeIndex(field)
}
db.addIndex = function (options) {
return nedb.ensureIndex(options)
}
db.compact = function () {
return nedb.persistence.compactDatafile()
}
db.reset = function () {
return file.clear(DB_FILEPATH)
}
db.count = function (query) {
return nedb.count(query)
}
db.insert = function (doc) {
return nedb.insert(doc)
}
db.update = function (query, update) {
return nedb.update(query, update)
}
db.find = function (query) {
return nedb.find(query)
}
db.remove = function (query, options) {
return nedb.remove(query, options)
}
db.streams = new Database(`${DB_DIR}/streams.db`)
module.exports = db
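With the new wrapper each database has to be loaded explicitly before it is queried; a minimal sketch (the 'error' status value mirrors the check in generate.js above):
async function example() {
  await db.streams.load()
  const broken = await db.streams.find({ status: 'error' })
  console.log(`${broken.length} streams currently marked as broken`)
}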

View File

@ -1,6 +1,9 @@
const { create: createPlaylist } = require('./playlist')
const store = require('./store')
const path = require('path')
const glob = require('glob')
const fs = require('mz/fs')
const fs = require('fs-extra')
const _ = require('lodash')
const file = {}

View File

@ -1,114 +1,33 @@
const { create: createPlaylist } = require('./playlist')
const store = require('./store')
const file = require('./file')
const generators = require('../generators')
const logger = require('./logger')
const db = require('./db')
const _ = require('lodash')
const file = require('./file')
const PUBLIC_DIR = process.env.PUBLIC_DIR || '.gh-pages'
const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs/generators'
const generator = {}
generator.generate = async function (filepath, query = {}, options = {}) {
options = {
...{
format: 'm3u',
saveEmpty: false,
includeNSFW: false,
includeGuides: true,
includeBroken: false,
onLoad: r => r,
uniqBy: item => item.id || _.uniqueId(),
sortBy: null
},
...options
}
query['is_nsfw'] = options.includeNSFW ? { $in: [true, false] } : false
query['is_broken'] = options.includeBroken ? { $in: [true, false] } : false
let items = await db
.find(query)
.sort({ name: 1, 'status.level': 1, 'resolution.height': -1, url: 1 })
items = _.uniqBy(items, 'url')
if (!options.saveEmpty && !items.length) return { filepath, query, options, count: 0 }
if (options.uniqBy) items = _.uniqBy(items, options.uniqBy)
items = options.onLoad(items)
if (options.sortBy) items = _.sortBy(items, options.sortBy)
switch (options.format) {
case 'json':
await saveAsJSON(filepath, items, options)
break
case 'm3u':
default:
await saveAsM3U(filepath, items, options)
break
}
return { filepath, query, options, count: items.length }
}
async function saveAsM3U(filepath, items, options) {
const playlist = await createPlaylist(filepath)
const header = {}
if (options.includeGuides) {
let guides = items.map(item => item.guides)
guides = _.uniq(_.flatten(guides)).sort().join(',')
header['x-tvg-url'] = guides
}
await playlist.header(header)
for (const item of items) {
const stream = store.create(item)
await playlist.link(
stream.get('url'),
stream.get('title'),
{
'tvg-id': stream.get('tvg_id'),
'tvg-country': stream.get('tvg_country'),
'tvg-language': stream.get('tvg_language'),
'tvg-logo': stream.get('tvg_logo'),
// 'tvg-url': stream.get('tvg_url') || undefined,
'user-agent': stream.get('http.user-agent') || undefined,
'group-title': stream.get('group_title')
},
{
'http-referrer': stream.get('http.referrer') || undefined,
'http-user-agent': stream.get('http.user-agent') || undefined
}
)
}
}
async function saveAsJSON(filepath, items, options) {
const output = items.map(item => {
const stream = store.create(item)
const categories = stream.get('categories').map(c => ({ name: c.name, slug: c.slug }))
const countries = stream.get('countries').map(c => ({ name: c.name, code: c.code }))
return {
name: stream.get('name'),
logo: stream.get('logo'),
url: stream.get('url'),
categories,
countries,
languages: stream.get('languages'),
tvg: {
id: stream.get('tvg_id'),
name: stream.get('name'),
url: stream.get('tvg_url')
generator.generate = async function (name, streams = []) {
if (typeof generators[name] === 'function') {
try {
let output = await generators[name].bind()(streams)
output = Array.isArray(output) ? output : [output]
for (const type of output) {
const playlist = createPlaylist(type.items, { public: true })
await file.create(`${PUBLIC_DIR}/${type.filepath}`, playlist.toString())
}
await file.create(`${LOGS_DIR}/${name}.log`, output.map(toJSON).join('\n'))
} catch (error) {
logger.error(`generators/${name}.js: ${error.message}`)
}
})
await file.create(filepath, JSON.stringify(output))
}
}
generator.saveAsM3U = saveAsM3U
generator.saveAsJSON = saveAsJSON
module.exports = generator
function toJSON(type) {
type.count = type.items.length
delete type.items
return JSON.stringify(type)
}

scripts/core/id.js Normal file
View File

@ -0,0 +1,19 @@
const { transliterate } = require('transliteration')
const id = {}
id.generate = function (name, code) {
if (!name || !code) return null
name = name.replace(/ *\([^)]*\) */g, '')
name = name.replace(/ *\[[^)]*\] */g, '')
name = name.replace(/\+/gi, 'Plus')
  // transliterate first, otherwise non-Latin characters would be stripped before they can be converted
  name = transliterate(name)
  name = name.replace(/[^a-z\d]+/gi, '')
  name = name.trim()
code = code.toLowerCase()
return `${name}.${code}`
}
module.exports = id
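For example (the first result follows directly from the code above; the second assumes the transliteration package renders 'Телеканал' as 'Telekanal'):
id.generate('Fox Life (Russia)', 'RU') // -> 'FoxLife.ru'
id.generate('Телеканал 1+1', 'UA') // -> 'Telekanal1Plus1.ua'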

View File

@ -8,3 +8,5 @@ exports.generator = require('./generator')
exports.playlist = require('./playlist')
exports.store = require('./store')
exports.markdown = require('./markdown')
exports.api = require('./api')
exports.id = require('./id')

View File

@ -1,42 +1,13 @@
const { createLogger, format, transports, addColors } = require('winston')
const { combine, timestamp, printf } = format
const { Signale } = require('signale')
const consoleFormat = ({ level, message, timestamp }) => {
if (typeof message === 'object') return JSON.stringify(message)
return message
}
const options = {}
const config = {
levels: {
error: 0,
warn: 1,
info: 2,
failed: 3,
success: 4,
http: 5,
verbose: 6,
debug: 7,
silly: 8
},
colors: {
info: 'white',
success: 'green',
failed: 'red'
}
}
const logger = new Signale(options)
const t = [
new transports.Console({
format: format.combine(format.printf(consoleFormat))
})
]
const logger = createLogger({
transports: t,
levels: config.levels,
level: 'verbose'
logger.config({
displayLabel: false,
displayScope: false,
displayBadge: false
})
addColors(config.colors)
module.exports = logger

View File

@ -3,35 +3,6 @@ const file = require('./file')
const markdown = {}
markdown.createTable = function (data, cols) {
let output = '<table>\n'
output += ' <thead>\n <tr>'
for (let column of cols) {
output += `<th align="${column.align}">${column.name}</th>`
}
output += '</tr>\n </thead>\n'
output += ' <tbody>\n'
for (let item of data) {
output += ' <tr>'
let i = 0
for (let prop in item) {
const column = cols[i]
let nowrap = column.nowrap
let align = column.align
output += `<td align="${align}"${nowrap ? ' nowrap' : ''}>${item[prop]}</td>`
i++
}
output += '</tr>\n'
}
output += ' </tbody>\n'
output += '</table>'
return output
}
markdown.compile = function (filepath) {
markdownInclude.compileFiles(file.resolve(filepath))
}

View File

@ -1,49 +1,88 @@
const file = require('./file')
const store = require('./store')
const _ = require('lodash')
const playlist = {}
playlist.create = async function (filepath) {
playlist.filepath = filepath
const dir = file.dirname(filepath)
file.createDir(dir)
await file.create(filepath, '')
class Playlist {
constructor(items = [], options = {}) {
this.header = {}
if (options.public) {
let guides = items
.map(item => (item.guides.length ? item.guides[0].url : null))
.filter(i => i)
this.header['x-tvg-url'] = _.uniq(guides).sort().join(',')
}
return playlist
}
this.links = []
for (const item of items) {
const stream = store.create(item)
playlist.header = async function (attrs) {
let header = `#EXTM3U`
for (const name in attrs) {
const value = attrs[name]
header += ` ${name}="${value}"`
}
header += `\n`
let attrs
if (options.public) {
attrs = {
'tvg-id': stream.get('tvg_id'),
'tvg-country': stream.get('tvg_country'),
'tvg-language': stream.get('tvg_language'),
'tvg-logo': stream.get('tvg_logo'),
'user-agent': stream.get('http.user-agent') || undefined,
'group-title': stream.get('group_title')
}
} else {
attrs = {
'tvg-id': stream.get('tvg_id'),
status: stream.get('status'),
'user-agent': stream.get('http.user-agent') || undefined
}
}
await file.append(playlist.filepath, header)
const vlcOpts = {
'http-referrer': stream.get('http.referrer') || undefined,
'http-user-agent': stream.get('http.user-agent') || undefined
}
return playlist
}
playlist.link = async function (url, title, attrs, vlcOpts) {
let link = `#EXTINF:-1`
for (const name in attrs) {
const value = attrs[name]
if (value !== undefined) {
link += ` ${name}="${value}"`
this.links.push({
url: stream.get('url'),
title: stream.get('title'),
attrs,
vlcOpts
})
}
}
link += `,${title}\n`
for (const name in vlcOpts) {
const value = vlcOpts[name]
if (value !== undefined) {
link += `#EXTVLCOPT:${name}=${value}\n`
toString() {
let output = `#EXTM3U`
for (const attr in this.header) {
const value = this.header[attr]
output += ` ${attr}="${value}"`
}
output += `\n`
for (const link of this.links) {
output += `#EXTINF:-1`
for (const name in link.attrs) {
const value = link.attrs[name]
if (value !== undefined) {
output += ` ${name}="${value}"`
}
}
output += `,${link.title}\n`
for (const name in link.vlcOpts) {
const value = link.vlcOpts[name]
if (value !== undefined) {
output += `#EXTVLCOPT:${name}=${value}\n`
}
}
output += `${link.url}\n`
}
return output
}
link += `${url}\n`
}
await file.append(playlist.filepath, link)
return playlist
playlist.create = function (items, options) {
return new Playlist(items, options)
}
module.exports = playlist
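A sketch of the output produced by the new Playlist class; items are enriched stream records like the ones shown earlier, and the attribute values here are illustrative (the real ones come from core/store):
const { create: createPlaylist } = require('./playlist')
const playlist = createPlaylist(items, { public: true })
console.log(playlist.toString())
// #EXTM3U x-tvg-url="https://example.com/guide.xml"
// #EXTINF:-1 tvg-id="FoxLife.ru" tvg-country="RU" tvg-language="Russian" tvg-logo="https://example.com/logo.png" group-title="General",Fox Life
// https://example.com/stream.m3u8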

scripts/core/table.js Normal file
View File

@ -0,0 +1,32 @@
const table = {}
table.create = function (data, cols) {
let output = '<table>\n'
output += ' <thead>\n <tr>'
for (let column of cols) {
output += `<th align="left">${column.name}</th>`
}
output += '</tr>\n </thead>\n'
output += ' <tbody>\n'
for (let item of data) {
output += ' <tr>'
let i = 0
for (let prop in item) {
const column = cols[i]
let nowrap = column.nowrap ? ` nowrap` : ''
let align = column.align ? ` align="${column.align}"` : ''
output += `<td${align}${nowrap}>${item[prop]}</td>`
i++
}
output += '</tr>\n'
}
output += ' </tbody>\n'
output += '</table>'
return output
}
module.exports = table
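A minimal call, mirroring createCategoryTable above (the row values are made up):
const table = require('./table')
const html = table.create(
  [{ name: 'News', channels: 693, playlist: '<code>https://iptv-org.github.io/iptv/categories/news.m3u</code>' }],
  [{ name: 'Category' }, { name: 'Channels', align: 'right' }, { name: 'Playlist', nowrap: true }]
)
// html contains a <table> with one header row and one body row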

View File

@ -1 +1,2 @@
codes.json
*
!.gitignore

File diff suppressed because one or more lines are too long

View File

@ -1,147 +0,0 @@
{
"auto": {
"name": "Auto",
"slug": "auto",
"nsfw": false
},
"animation": {
"name": "Animation",
"slug": "animation",
"nsfw": false
},
"business": {
"name": "Business",
"slug": "business",
"nsfw": false
},
"classic": {
"name": "Classic",
"slug": "classic",
"nsfw": false
},
"comedy": {
"name": "Comedy",
"slug": "comedy",
"nsfw": false
},
"cooking": {
"name": "Cooking",
"slug": "cooking",
"nsfw": false
},
"culture": {
"name": "Culture",
"slug": "culture",
"nsfw": false
},
"documentary": {
"name": "Documentary",
"slug": "documentary",
"nsfw": false
},
"education": {
"name": "Education",
"slug": "education",
"nsfw": false
},
"entertainment": {
"name": "Entertainment",
"slug": "entertainment",
"nsfw": false
},
"family": {
"name": "Family",
"slug": "family",
"nsfw": false
},
"general": {
"name": "General",
"slug": "general",
"nsfw": false
},
"kids": {
"name": "Kids",
"slug": "kids",
"nsfw": false
},
"legislative": {
"name": "Legislative",
"slug": "legislative",
"nsfw": false
},
"lifestyle": {
"name": "Lifestyle",
"slug": "lifestyle",
"nsfw": false
},
"local": {
"name": "Local",
"slug": "local",
"nsfw": false
},
"movies": {
"name": "Movies",
"slug": "movies",
"nsfw": false
},
"music": {
"name": "Music",
"slug": "music",
"nsfw": false
},
"news": {
"name": "News",
"slug": "news",
"nsfw": false
},
"outdoor": {
"name": "Outdoor",
"slug": "outdoor",
"nsfw": false
},
"relax": {
"name": "Relax",
"slug": "relax",
"nsfw": false
},
"religious": {
"name": "Religious",
"slug": "religious",
"nsfw": false
},
"series": {
"name": "Series",
"slug": "series",
"nsfw": false
},
"science": {
"name": "Science",
"slug": "science",
"nsfw": false
},
"shop": {
"name": "Shop",
"slug": "shop",
"nsfw": false
},
"sports": {
"name": "Sports",
"slug": "sports",
"nsfw": false
},
"travel": {
"name": "Travel",
"slug": "travel",
"nsfw": false
},
"weather": {
"name": "Weather",
"slug": "weather",
"nsfw": false
},
"xxx": {
"name": "XXX",
"slug": "xxx",
"nsfw": true
}
}

View File

@ -1,264 +0,0 @@
{
"AD": { "name": "Andorra", "code": "AD", "lang": "cat" },
"AE": { "name": "United Arab Emirates", "code": "AE", "lang": "ara" },
"AF": { "name": "Afghanistan", "code": "AF", "lang": "pus" },
"AG": { "name": "Antigua and Barbuda", "code": "AG", "lang": "eng" },
"AI": { "name": "Anguilla", "code": "AI", "lang": "eng" },
"AL": { "name": "Albania", "code": "AL", "lang": "sqi" },
"AM": { "name": "Armenia", "code": "AM", "lang": "hye" },
"AO": { "name": "Angola", "code": "AO", "lang": "por" },
"AQ": { "name": "Antarctica", "code": "AQ", "lang": null },
"AR": { "name": "Argentina", "code": "AR", "lang": "spa" },
"AS": { "name": "American Samoa", "code": "AS", "lang": "eng" },
"AT": { "name": "Austria", "code": "AT", "lang": "deu" },
"AU": { "name": "Australia", "code": "AU", "lang": "eng" },
"AW": { "name": "Aruba", "code": "AW", "lang": "nld" },
"AX": { "name": "Åland", "code": "AX", "lang": "swe" },
"AZ": { "name": "Azerbaijan", "code": "AZ", "lang": "aze" },
"BA": { "name": "Bosnia and Herzegovina", "code": "BA", "lang": "bos" },
"BB": { "name": "Barbados", "code": "BB", "lang": "eng" },
"BD": { "name": "Bangladesh", "code": "BD", "lang": "ben" },
"BE": { "name": "Belgium", "code": "BE", "lang": "nld" },
"BF": { "name": "Burkina Faso", "code": "BF", "lang": "fra" },
"BG": { "name": "Bulgaria", "code": "BG", "lang": "bul" },
"BH": { "name": "Bahrain", "code": "BH", "lang": "ara" },
"BI": { "name": "Burundi", "code": "BI", "lang": "fra" },
"BJ": { "name": "Benin", "code": "BJ", "lang": "fra" },
"BL": { "name": "Saint Barthélemy", "code": "BL", "lang": "fra" },
"BM": { "name": "Bermuda", "code": "BM", "lang": "eng" },
"BN": { "name": "Brunei", "code": "BN", "lang": "msa" },
"BO": { "name": "Bolivia", "code": "BO", "lang": "spa" },
"BQ": { "name": "Bonaire", "code": "BQ", "lang": "nld" },
"BR": { "name": "Brazil", "code": "BR", "lang": "por" },
"BS": { "name": "Bahamas", "code": "BS", "lang": "eng" },
"BT": { "name": "Bhutan", "code": "BT", "lang": "dzo" },
"BV": { "name": "Bouvet Island", "code": "BV", "lang": "nor" },
"BW": { "name": "Botswana", "code": "BW", "lang": "eng" },
"BY": { "name": "Belarus", "code": "BY", "lang": "bel" },
"BZ": { "name": "Belize", "code": "BZ", "lang": "eng" },
"CA": { "name": "Canada", "code": "CA", "lang": "eng" },
"CC": { "name": "Cocos [Keeling] Islands", "code": "CC", "lang": "eng" },
"CD": {
"name": "Democratic Republic of the Congo",
"code": "CD",
"lang": "fra"
},
"CF": { "name": "Central African Republic", "code": "CF", "lang": "fra" },
"CG": { "name": "Republic of the Congo", "code": "CG", "lang": "fra" },
"CH": { "name": "Switzerland", "code": "CH", "lang": "deu" },
"CI": { "name": "Ivory Coast", "code": "CI", "lang": "fra" },
"CK": { "name": "Cook Islands", "code": "CK", "lang": "eng" },
"CL": { "name": "Chile", "code": "CL", "lang": "spa" },
"CM": { "name": "Cameroon", "code": "CM", "lang": "eng" },
"CN": { "name": "China", "code": "CN", "lang": "zho" },
"CO": { "name": "Colombia", "code": "CO", "lang": "spa" },
"CR": { "name": "Costa Rica", "code": "CR", "lang": "spa" },
"CU": { "name": "Cuba", "code": "CU", "lang": "spa" },
"CV": { "name": "Cape Verde", "code": "CV", "lang": "por" },
"CW": { "name": "Curacao", "code": "CW", "lang": "nld" },
"CX": { "name": "Christmas Island", "code": "CX", "lang": "eng" },
"CY": { "name": "Cyprus", "code": "CY", "lang": "ell" },
"CZ": { "name": "Czech Republic", "code": "CZ", "lang": "ces" },
"DE": { "name": "Germany", "code": "DE", "lang": "deu" },
"DJ": { "name": "Djibouti", "code": "DJ", "lang": "fra" },
"DK": { "name": "Denmark", "code": "DK", "lang": "dan" },
"DM": { "name": "Dominica", "code": "DM", "lang": "eng" },
"DO": { "name": "Dominican Republic", "code": "DO", "lang": "spa" },
"DZ": { "name": "Algeria", "code": "DZ", "lang": "ara" },
"EC": { "name": "Ecuador", "code": "EC", "lang": "spa" },
"EE": { "name": "Estonia", "code": "EE", "lang": "est" },
"EG": { "name": "Egypt", "code": "EG", "lang": "ara" },
"EH": { "name": "Western Sahara", "code": "EH", "lang": "spa" },
"ER": { "name": "Eritrea", "code": "ER", "lang": "tir" },
"ES": { "name": "Spain", "code": "ES", "lang": "spa" },
"ET": { "name": "Ethiopia", "code": "ET", "lang": "amh" },
"FI": { "name": "Finland", "code": "FI", "lang": "fin" },
"FJ": { "name": "Fiji", "code": "FJ", "lang": "eng" },
"FK": { "name": "Falkland Islands", "code": "FK", "lang": "eng" },
"FM": { "name": "Micronesia", "code": "FM", "lang": "eng" },
"FO": { "name": "Faroe Islands", "code": "FO", "lang": "fao" },
"FR": { "name": "France", "code": "FR", "lang": "fra" },
"GA": { "name": "Gabon", "code": "GA", "lang": "fra" },
"UK": { "name": "United Kingdom", "code": "UK", "lang": "eng" },
"GD": { "name": "Grenada", "code": "GD", "lang": "eng" },
"GE": { "name": "Georgia", "code": "GE", "lang": "kat" },
"GF": { "name": "French Guiana", "code": "GF", "lang": "fra" },
"GG": { "name": "Guernsey", "code": "GG", "lang": "eng" },
"GH": { "name": "Ghana", "code": "GH", "lang": "eng" },
"GI": { "name": "Gibraltar", "code": "GI", "lang": "eng" },
"GL": { "name": "Greenland", "code": "GL", "lang": "kal" },
"GM": { "name": "Gambia", "code": "GM", "lang": "eng" },
"GN": { "name": "Guinea", "code": "GN", "lang": "fra" },
"GP": { "name": "Guadeloupe", "code": "GP", "lang": "fra" },
"GQ": { "name": "Equatorial Guinea", "code": "GQ", "lang": "spa" },
"GR": { "name": "Greece", "code": "GR", "lang": "ell" },
"GS": {
"name": "South Georgia and the South Sandwich Islands",
"code": "GS",
"lang": "eng"
},
"GT": { "name": "Guatemala", "code": "GT", "lang": "spa" },
"GU": { "name": "Guam", "code": "GU", "lang": "eng" },
"GW": { "name": "Guinea-Bissau", "code": "GW", "lang": "por" },
"GY": { "name": "Guyana", "code": "GY", "lang": "eng" },
"HK": { "name": "Hong Kong", "code": "HK", "lang": "zho" },
"HM": { "name": "Heard Island and McDonald Islands", "code": "HM", "lang": "eng" },
"HN": { "name": "Honduras", "code": "HN", "lang": "spa" },
"HR": { "name": "Croatia", "code": "HR", "lang": "hrv" },
"HT": { "name": "Haiti", "code": "HT", "lang": "fra" },
"HU": { "name": "Hungary", "code": "HU", "lang": "hun" },
"ID": { "name": "Indonesia", "code": "ID", "lang": "ind" },
"IE": { "name": "Ireland", "code": "IE", "lang": "gle" },
"IL": { "name": "Israel", "code": "IL", "lang": "heb" },
"IM": { "name": "Isle of Man", "code": "IM", "lang": "eng" },
"IN": { "name": "India", "code": "IN", "lang": "hin" },
"IO": { "name": "British Indian Ocean Territory", "code": "IO", "lang": "eng" },
"IQ": { "name": "Iraq", "code": "IQ", "lang": "ara" },
"IR": { "name": "Iran", "code": "IR", "lang": "fas" },
"IS": { "name": "Iceland", "code": "IS", "lang": "isl" },
"IT": { "name": "Italy", "code": "IT", "lang": "ita" },
"JE": { "name": "Jersey", "code": "JE", "lang": "eng" },
"JM": { "name": "Jamaica", "code": "JM", "lang": "eng" },
"JO": { "name": "Jordan", "code": "JO", "lang": "ara" },
"JP": { "name": "Japan", "code": "JP", "lang": "jpn" },
"KE": { "name": "Kenya", "code": "KE", "lang": "eng" },
"KG": { "name": "Kyrgyzstan", "code": "KG", "lang": "kir" },
"KH": { "name": "Cambodia", "code": "KH", "lang": "khm" },
"KI": { "name": "Kiribati", "code": "KI", "lang": "eng" },
"KM": { "name": "Comoros", "code": "KM", "lang": "ara" },
"KN": { "name": "Saint Kitts and Nevis", "code": "KN", "lang": "eng" },
"KP": { "name": "North Korea", "code": "KP", "lang": "kor" },
"KR": { "name": "South Korea", "code": "KR", "lang": "kor" },
"KW": { "name": "Kuwait", "code": "KW", "lang": "ara" },
"KY": { "name": "Cayman Islands", "code": "KY", "lang": "eng" },
"KZ": { "name": "Kazakhstan", "code": "KZ", "lang": "kaz" },
"LA": { "name": "Laos", "code": "LA", "lang": "lao" },
"LB": { "name": "Lebanon", "code": "LB", "lang": "ara" },
"LC": { "name": "Saint Lucia", "code": "LC", "lang": "eng" },
"LI": { "name": "Liechtenstein", "code": "LI", "lang": "deu" },
"LK": { "name": "Sri Lanka", "code": "LK", "lang": "sin" },
"LR": { "name": "Liberia", "code": "LR", "lang": "eng" },
"LS": { "name": "Lesotho", "code": "LS", "lang": "eng" },
"LT": { "name": "Lithuania", "code": "LT", "lang": "lit" },
"LU": { "name": "Luxembourg", "code": "LU", "lang": "fra" },
"LV": { "name": "Latvia", "code": "LV", "lang": "lav" },
"LY": { "name": "Libya", "code": "LY", "lang": "ara" },
"MA": { "name": "Morocco", "code": "MA", "lang": "ara" },
"MC": { "name": "Monaco", "code": "MC", "lang": "fra" },
"MD": { "name": "Moldova", "code": "MD", "lang": "ron" },
"ME": { "name": "Montenegro", "code": "ME", "lang": "srp" },
"MF": { "name": "Saint Martin", "code": "MF", "lang": "eng" },
"MG": { "name": "Madagascar", "code": "MG", "lang": "fra" },
"MH": { "name": "Marshall Islands", "code": "MH", "lang": "eng" },
"MK": { "name": "North Macedonia", "code": "MK", "lang": "mkd" },
"ML": { "name": "Mali", "code": "ML", "lang": "fra" },
"MM": { "name": "Myanmar [Burma]", "code": "MM", "lang": "mya" },
"MN": { "name": "Mongolia", "code": "MN", "lang": "mon" },
"MO": { "name": "Macao", "code": "MO", "lang": "zho" },
"MP": { "name": "Northern Mariana Islands", "code": "MP", "lang": "eng" },
"MQ": { "name": "Martinique", "code": "MQ", "lang": "fra" },
"MR": { "name": "Mauritania", "code": "MR", "lang": "ara" },
"MS": { "name": "Montserrat", "code": "MS", "lang": "eng" },
"MT": { "name": "Malta", "code": "MT", "lang": "mlt" },
"MU": { "name": "Mauritius", "code": "MU", "lang": "eng" },
"MV": { "name": "Maldives", "code": "MV", "lang": "div" },
"MW": { "name": "Malawi", "code": "MW", "lang": "eng" },
"MX": { "name": "Mexico", "code": "MX", "lang": "spa" },
"MY": { "name": "Malaysia", "code": "MY", "lang": "msa" },
"MZ": { "name": "Mozambique", "code": "MZ", "lang": "por" },
"NA": { "name": "Namibia", "code": "NA", "lang": "eng" },
"NC": { "name": "New Caledonia", "code": "NC", "lang": "fra" },
"NE": { "name": "Niger", "code": "NE", "lang": "fra" },
"NF": { "name": "Norfolk Island", "code": "NF", "lang": "eng" },
"NG": { "name": "Nigeria", "code": "NG", "lang": "eng" },
"NI": { "name": "Nicaragua", "code": "NI", "lang": "spa" },
"NL": { "name": "Netherlands", "code": "NL", "lang": "nld" },
"NO": { "name": "Norway", "code": "NO", "lang": "nor" },
"NP": { "name": "Nepal", "code": "NP", "lang": "nep" },
"NR": { "name": "Nauru", "code": "NR", "lang": "eng" },
"NU": { "name": "Niue", "code": "NU", "lang": "eng" },
"NZ": { "name": "New Zealand", "code": "NZ", "lang": "eng" },
"OM": { "name": "Oman", "code": "OM", "lang": "ara" },
"PA": { "name": "Panama", "code": "PA", "lang": "spa" },
"PE": { "name": "Peru", "code": "PE", "lang": "spa" },
"PF": { "name": "French Polynesia", "code": "PF", "lang": "fra" },
"PG": { "name": "Papua New Guinea", "code": "PG", "lang": "eng" },
"PH": { "name": "Philippines", "code": "PH", "lang": "eng" },
"PK": { "name": "Pakistan", "code": "PK", "lang": "eng" },
"PL": { "name": "Poland", "code": "PL", "lang": "pol" },
"PM": { "name": "Saint Pierre and Miquelon", "code": "PM", "lang": "fra" },
"PN": { "name": "Pitcairn Islands", "code": "PN", "lang": "eng" },
"PR": { "name": "Puerto Rico", "code": "PR", "lang": "spa" },
"PS": { "name": "Palestine", "code": "PS", "lang": "ara" },
"PT": { "name": "Portugal", "code": "PT", "lang": "por" },
"PW": { "name": "Palau", "code": "PW", "lang": "eng" },
"PY": { "name": "Paraguay", "code": "PY", "lang": "spa" },
"QA": { "name": "Qatar", "code": "QA", "lang": "ara" },
"RE": { "name": "Réunion", "code": "RE", "lang": "fra" },
"RO": { "name": "Romania", "code": "RO", "lang": "ron" },
"RS": { "name": "Serbia", "code": "RS", "lang": "srp" },
"RU": { "name": "Russia", "code": "RU", "lang": "rus" },
"RW": { "name": "Rwanda", "code": "RW", "lang": "kin" },
"SA": { "name": "Saudi Arabia", "code": "SA", "lang": "ara" },
"SB": { "name": "Solomon Islands", "code": "SB", "lang": "eng" },
"SC": { "name": "Seychelles", "code": "SC", "lang": "fra" },
"SD": { "name": "Sudan", "code": "SD", "lang": "ara" },
"SE": { "name": "Sweden", "code": "SE", "lang": "swe" },
"SG": { "name": "Singapore", "code": "SG", "lang": "eng" },
"SH": { "name": "Saint Helena", "code": "SH", "lang": "eng" },
"SI": { "name": "Slovenia", "code": "SI", "lang": "slv" },
"SJ": { "name": "Svalbard and Jan Mayen", "code": "SJ", "lang": "nor" },
"SK": { "name": "Slovakia", "code": "SK", "lang": "slk" },
"SL": { "name": "Sierra Leone", "code": "SL", "lang": "eng" },
"SM": { "name": "San Marino", "code": "SM", "lang": "ita" },
"SN": { "name": "Senegal", "code": "SN", "lang": "fra" },
"SO": { "name": "Somalia", "code": "SO", "lang": "som" },
"SR": { "name": "Suriname", "code": "SR", "lang": "nld" },
"SS": { "name": "South Sudan", "code": "SS", "lang": "eng" },
"ST": { "name": "São Tomé and Príncipe", "code": "ST", "lang": "por" },
"SV": { "name": "El Salvador", "code": "SV", "lang": "spa" },
"SX": { "name": "Sint Maarten", "code": "SX", "lang": "nld" },
"SY": { "name": "Syria", "code": "SY", "lang": "ara" },
"SZ": { "name": "Swaziland", "code": "SZ", "lang": "eng" },
"TC": { "name": "Turks and Caicos Islands", "code": "TC", "lang": "eng" },
"TD": { "name": "Chad", "code": "TD", "lang": "fra" },
"TF": { "name": "French Southern Territories", "code": "TF", "lang": "fra" },
"TG": { "name": "Togo", "code": "TG", "lang": "fra" },
"TH": { "name": "Thailand", "code": "TH", "lang": "tha" },
"TJ": { "name": "Tajikistan", "code": "TJ", "lang": "tgk" },
"TK": { "name": "Tokelau", "code": "TK", "lang": "eng" },
"TL": { "name": "East Timor", "code": "TL", "lang": "por" },
"TM": { "name": "Turkmenistan", "code": "TM", "lang": "tuk" },
"TN": { "name": "Tunisia", "code": "TN", "lang": "ara" },
"TO": { "name": "Tonga", "code": "TO", "lang": "eng" },
"TR": { "name": "Turkey", "code": "TR", "lang": "tur" },
"TT": { "name": "Trinidad and Tobago", "code": "TT", "lang": "eng" },
"TV": { "name": "Tuvalu", "code": "TV", "lang": "eng" },
"TW": { "name": "Taiwan", "code": "TW", "lang": "zho" },
"TZ": { "name": "Tanzania", "code": "TZ", "lang": "swa" },
"UA": { "name": "Ukraine", "code": "UA", "lang": "ukr" },
"UG": { "name": "Uganda", "code": "UG", "lang": "eng" },
"UM": { "name": "U.S. Minor Outlying Islands", "code": "UM", "lang": "eng" },
"US": { "name": "United States", "code": "US", "lang": "eng" },
"UY": { "name": "Uruguay", "code": "UY", "lang": "spa" },
"UZ": { "name": "Uzbekistan", "code": "UZ", "lang": "uzb" },
"VA": { "name": "Vatican City", "code": "VA", "lang": "ita" },
"VC": { "name": "Saint Vincent and the Grenadines", "code": "VC", "lang": "eng" },
"VE": { "name": "Venezuela", "code": "VE", "lang": "spa" },
"VG": { "name": "British Virgin Islands", "code": "VG", "lang": "eng" },
"VI": { "name": "U.S. Virgin Islands", "code": "VI", "lang": "eng" },
"VN": { "name": "Vietnam", "code": "VN", "lang": "vie" },
"VU": { "name": "Vanuatu", "code": "VU", "lang": "bis" },
"WF": { "name": "Wallis and Futuna", "code": "WF", "lang": "fra" },
"WS": { "name": "Samoa", "code": "WS", "lang": "smo" },
"XK": { "name": "Kosovo", "code": "XK", "lang": "sqi" },
"YE": { "name": "Yemen", "code": "YE", "lang": "ara" },
"YT": { "name": "Mayotte", "code": "YT", "lang": "fra" },
"ZA": {
"name": "South Africa",
"code": "ZA",
"lang": "afr"
},
"ZM": { "name": "Zambia", "code": "ZM", "lang": "eng" },
"ZW": { "name": "Zimbabwe", "code": "ZW", "lang": "eng" }
}

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1,27 +0,0 @@
{
"online": {
"label": "",
"code": "online",
"level": 1
},
"geo_blocked": {
"label": "Geo-blocked",
"code": "geo_blocked",
"level": 2
},
"not_247": {
"label": "Not 24/7",
"code": "not_247",
"level": 3
},
"timeout": {
"label": "Timeout",
"code": "timeout",
"level": 4
},
"offline": {
"label": "Offline",
"code": "offline",
"level": 5
}
}

View File

@ -0,0 +1,18 @@
const api = require('../core/api')
const _ = require('lodash')
module.exports = async function (streams = []) {
await api.categories.load()
const categories = await api.categories.all()
const output = []
for (const category of categories) {
let items = _.filter(streams, { categories: [{ id: category.id }] })
output.push({ filepath: `categories/${category.id}.m3u`, items })
}
let items = _.filter(streams, stream => !stream.categories.length)
output.push({ filepath: 'categories/undefined.m3u', items })
return output
}

View File

@ -0,0 +1,30 @@
const api = require('../core/api')
const _ = require('lodash')
module.exports = async function (streams = []) {
streams = _.filter(streams, stream => stream.is_nsfw === false)
await api.countries.load()
const countries = await api.countries.all()
await api.regions.load()
const regions = await api.regions.all()
const output = []
for (const country of countries) {
const countryAreaCodes = _.filter(regions, { countries: [country.code] }).map(
r => `r/${r.code}`
)
countryAreaCodes.push(`c/${country.code}`)
let items = _.filter(streams, stream => {
return _.intersection(stream.broadcast_area, countryAreaCodes).length
})
output.push({ filepath: `countries/${country.code.toLowerCase()}.m3u`, items })
}
let items = _.filter(streams, stream => !stream.broadcast_area.length)
output.push({ filepath: 'countries/undefined.m3u', items })
return output
}

View File

@ -0,0 +1,10 @@
exports.categories = require('./categories')
exports.countries = require('./countries')
exports.languages = require('./languages')
exports.regions = require('./regions')
exports.index_m3u = require('./index_m3u')
exports.index_nsfw_m3u = require('./index_nsfw_m3u')
exports.index_category_m3u = require('./index_category_m3u')
exports.index_country_m3u = require('./index_country_m3u')
exports.index_language_m3u = require('./index_language_m3u')
exports.index_region_m3u = require('./index_region_m3u')

View File

@ -0,0 +1,32 @@
const _ = require('lodash')
module.exports = async function (streams = []) {
streams = _.filter(streams, stream => stream.is_nsfw === false)
let items = []
streams.forEach(stream => {
if (!stream.categories.length) {
const item = _.cloneDeep(stream)
item.group_title = 'Undefined'
items.push(item)
return
}
stream.categories
.filter(c => c)
.forEach(category => {
const item = _.cloneDeep(stream)
item.group_title = category.name
items.push(item)
})
})
items = _.sortBy(items, item => {
if (item.group_title === 'Undefined') return ''
return item.group_title
})
return { filepath: 'index.category.m3u', items }
}

View File

@ -0,0 +1,63 @@
const api = require('../core/api')
const _ = require('lodash')
module.exports = async function (streams = []) {
streams = _.filter(streams, stream => stream.is_nsfw === false)
await api.regions.load()
let regions = await api.regions.all()
regions = _.keyBy(regions, 'code')
await api.countries.load()
let countries = await api.countries.all()
countries = _.keyBy(countries, 'code')
let items = []
streams.forEach(stream => {
if (!stream.broadcast_area.length) {
const item = _.cloneDeep(stream)
item.group_title = 'Undefined'
items.push(item)
return
}
getBroadcastCountries(stream, { countries, regions }).forEach(country => {
const item = _.cloneDeep(stream)
item.group_title = country.name
items.push(item)
})
})
items = _.sortBy(items, item => {
if (item.group_title === 'Undefined') return ''
return item.group_title
})
return { filepath: 'index.country.m3u', items }
}
function getBroadcastCountries(stream, { countries, regions }) {
let codes = stream.broadcast_area.reduce((acc, item) => {
const [type, code] = item.split('/')
switch (type) {
case 'c':
acc.push(code)
break
case 'r':
if (regions[code]) {
acc = acc.concat(regions[code].countries)
}
break
case 's':
        const [c] = code.split('-') // take the country part of a subdivision code such as 'US-CA'
acc.push(c)
break
}
return acc
}, [])
codes = _.uniq(codes)
return codes.map(code => countries[code]).filter(c => c)
}
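For example, a stream with broadcast_area ['c/UK', 's/CA-ON', 'r/EUR'] is grouped under the United Kingdom, Canada and every country listed for the EUR region (assuming those codes exist in countries.json and regions.json).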

View File

@ -0,0 +1,29 @@
const _ = require('lodash')
module.exports = async function (streams = []) {
streams = _.filter(streams, stream => stream.is_nsfw === false)
let items = []
streams.forEach(stream => {
if (!stream.languages.length) {
const item = _.cloneDeep(stream)
item.group_title = 'Undefined'
items.push(item)
return
}
stream.languages.forEach(language => {
const item = _.cloneDeep(stream)
item.group_title = language.name
items.push(item)
})
})
items = _.sortBy(items, i => {
if (i.group_title === 'Undefined') return ''
return i.group_title
})
return { filepath: 'index.language.m3u', items }
}

View File

@ -0,0 +1,7 @@
const api = require('../core/api')
const _ = require('lodash')
module.exports = async function (streams = []) {
streams = _.filter(streams, stream => stream.is_nsfw === false)
return { filepath: 'index.m3u', items: streams }
}

View File

@ -0,0 +1,6 @@
const api = require('../core/api')
const _ = require('lodash')
module.exports = async function (streams = []) {
return { filepath: 'index.nsfw.m3u', items: streams }
}

View File

@ -0,0 +1,57 @@
const api = require('../core/api')
const _ = require('lodash')
module.exports = async function (streams = []) {
streams = _.filter(streams, stream => stream.is_nsfw === false)
await api.regions.load()
let regions = await api.regions.all()
regions = _.keyBy(regions, 'code')
let items = []
streams.forEach(stream => {
if (!stream.broadcast_area.length) {
const item = _.cloneDeep(stream)
item.group_title = 'Undefined'
items.push(item)
return
}
getChannelRegions(stream, { regions }).forEach(region => {
const item = _.cloneDeep(stream)
item.group_title = region.name
items.push(item)
})
})
items = _.sortBy(items, i => {
if (i.group_title === 'Undefined') return ''
return i.group_title
})
return { filepath: 'index.region.m3u', items }
}
function getChannelRegions(stream, { regions }) {
return stream.broadcast_area
.reduce((acc, item) => {
const [type, code] = item.split('/')
switch (type) {
case 'r':
acc.push(regions[code])
break
case 's':
const [c] = code.split('-')
const r1 = _.filter(regions, { countries: [c] })
acc = acc.concat(r1)
break
case 'c':
const r2 = _.filter(regions, { countries: [code] })
acc = acc.concat(r2)
break
}
return acc
}, [])
.filter(i => i)
}

View File

@ -0,0 +1,25 @@
const _ = require('lodash')
module.exports = async function (streams = []) {
streams = _.filter(streams, stream => stream.is_nsfw === false)
let languages = []
streams.forEach(stream => {
languages = languages.concat(stream.languages)
})
languages = _.uniqBy(languages, 'code')
languages = _.sortBy(languages, 'name')
const output = []
for (const language of languages) {
let items = _.filter(streams, { languages: [{ code: language.code }] })
if (items.length) {
output.push({ filepath: `languages/${language.code}.m3u`, items })
}
}
let items = _.filter(streams, stream => !stream.languages.length)
output.push({ filepath: 'languages/undefined.m3u', items })
return output
}
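
The lodash shorthand _.filter(streams, { languages: [{ code }] }) does a partial match: it keeps any stream whose languages array contains an entry with that code, wherever it appears. For example:

const _ = require('lodash')

const streams = [
  { name: 'Chaîne A', languages: [{ code: 'fra', name: 'French' }] },
  { name: 'Kanal B', languages: [{ code: 'deu', name: 'German' }, { code: 'fra', name: 'French' }] },
  { name: 'Channel C', languages: [] }
]

console.log(_.filter(streams, { languages: [{ code: 'fra' }] }).map(s => s.name))
// ['Chaîne A', 'Kanal B']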

View File

@ -0,0 +1,23 @@
const api = require('../core/api')
const _ = require('lodash')
module.exports = async function (streams = []) {
streams = _.filter(streams, stream => stream.is_nsfw === false)
await api.regions.load()
const regions = await api.regions.all()
const output = []
for (const region of regions) {
const areaCodes = region.countries.map(code => `c/${code}`)
areaCodes.push(`r/${region.code}`)
let items = _.filter(streams, stream => _.intersection(stream.broadcast_area, areaCodes).length)
output.push({ filepath: `regions/${region.code.toLowerCase()}.m3u`, items })
}
let items = _.filter(streams, stream => !stream.broadcast_area.length)
output.push({ filepath: 'regions/undefined.m3u', items })
return output
}
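
A consequence of the areaCodes list above: a stream declared only for a single country is still included in every regional playlist whose region lists that country. With a made-up region record:

const _ = require('lodash')

const region = { code: 'EUR', countries: ['DE', 'FR'] }
const areaCodes = region.countries.map(code => `c/${code}`)
areaCodes.push(`r/${region.code}`)
console.log(areaCodes) // ['c/DE', 'c/FR', 'r/EUR']

const stream = { broadcast_area: ['c/FR'] }
console.log(_.intersection(stream.broadcast_area, areaCodes).length > 0) // true, lands in regions/eur.m3u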

View File

@ -1,12 +1,13 @@
module.exports = function () {
if (this.group_title) return this.group_title
if (Array.isArray(this.categories)) {
if (this.categories.length) {
return this.categories
.map(i => i.name)
.filter(c => c)
.map(category => category.name)
.sort()
.join(';')
}
return ''
return 'Undefined'
}
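
With the new fallback, a stream's group-title comes from its sorted category names, or 'Undefined' when it has none. A sketch, assuming the module is loaded directly and invoked with the stream bound to this (how the playlist writer calls it is not shown in this diff):

const group_title = require('./group_title')

console.log(group_title.call({ group_title: null, categories: [{ name: 'News' }, { name: 'Movies' }] }))
// 'Movies;News'
console.log(group_title.call({ group_title: null, categories: [] }))
// 'Undefined'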

View File

@ -1,7 +1,5 @@
exports.group_title = require('./group_title')
exports.title = require('./title')
exports.tvg_country = require('./tvg_country')
exports.tvg_id = require('./tvg_id')
exports.tvg_language = require('./tvg_language')
exports.tvg_logo = require('./tvg_logo')
exports.tvg_url = require('./tvg_url')
exports.tvg_country = require('./tvg_country')
exports.tvg_language = require('./tvg_language')

View File

@ -1,13 +0,0 @@
module.exports = function () {
let title = this.name
if (this.resolution.height) {
title += ` (${this.resolution.height}p)`
}
if (this.status.label) {
title += ` [${this.status.label}]`
}
return title
}

View File

@ -1,5 +1,16 @@
module.exports = function () {
if (this.tvg_country) return this.tvg_country
return Array.isArray(this.countries) ? this.countries.map(i => i.code).join(';') : ''
if (this.broadcast_area.length) {
return this.broadcast_area
.map(item => {
const [_, code] = item.split('/')
return code
})
.filter(i => i)
.sort()
.join(';')
}
return ''
}
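
The tvg-country value now comes straight from broadcast_area, with the prefixes stripped and the codes sorted. A sketch, again assuming the setter is invoked with the stream bound to this:

const tvg_country = require('./tvg_country')

console.log(tvg_country.call({ tvg_country: '', broadcast_area: ['c/US', 'r/EUR', 's/CA-ON'] }))
// 'CA-ON;EUR;US'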

View File

@ -1,3 +1,3 @@
module.exports = function () {
return this.id || ''
return this.channel || ''
}

View File

@ -1,3 +1,13 @@
module.exports = function () {
return Array.isArray(this.languages) ? this.languages.map(i => i.name).join(';') : ''
if (this.tvg_language) return this.tvg_language
if (this.languages.length) {
return this.languages
.map(language => (language ? language.name : null))
.filter(l => l)
.sort()
.join(';')
}
return ''
}

View File

@ -1,3 +1,5 @@
module.exports = function () {
if (this.tvg_logo) return this.tvg_logo
return this.logo || ''
}

View File

@ -1,3 +0,0 @@
module.exports = function () {
return this.guides.length ? this.guides[0] : ''
}

View File

@ -1,8 +0,0 @@
const categories = require('../../data/categories')
module.exports = function ({ group_title }) {
return group_title
.split(';')
.map(i => categories[i.toLowerCase()])
.filter(i => i)
}

View File

@ -0,0 +1,3 @@
module.exports = function ({ channel }) {
return channel || null
}

View File

@ -1,25 +0,0 @@
const dataRegions = require('../../data/regions')
const dataCountries = require('../../data/countries')
module.exports = function ({ tvg_country, countries = [] }) {
if (tvg_country) {
return tvg_country
.split(';')
.reduce((acc, curr) => {
const region = dataRegions[curr]
if (region) {
for (let code of region.country_codes) {
if (!acc.includes(code)) acc.push(code)
}
} else {
acc.push(curr)
}
return acc
}, [])
.map(item => dataCountries[item])
.filter(i => i)
}
return countries
}

View File

@ -1,3 +0,0 @@
module.exports = function ({ tvg_url, guides = [] }) {
return tvg_url ? [tvg_url] : guides
}

View File

@ -0,0 +1,3 @@
module.exports = function ({ http_referrer }) {
return http_referrer || null
}

View File

@ -1,12 +1,4 @@
exports.categories = require('./categories')
exports.countries = require('./countries')
exports.guides = require('./guides')
exports.is_broken = require('./is_broken')
exports.is_nsfw = require('./is_nsfw')
exports.languages = require('./languages')
exports.name = require('./name')
exports.regions = require('./regions')
exports.resolution = require('./resolution')
exports.src_country = require('./src_country')
exports.status = require('./status')
exports.url = require('./url')
exports.http_referrer = require('./http_referrer')
exports.user_agent = require('./user_agent')
exports.channel = require('./channel')

View File

@ -1,7 +0,0 @@
module.exports = function ({ is_broken = false, status }) {
if (status) {
return status.level > 3 ? true : false
}
return is_broken
}

View File

@ -1,3 +0,0 @@
module.exports = function ({ categories }) {
return Array.isArray(categories) ? categories.filter(c => c.nsfw).length > 0 : false
}

View File

@ -1,12 +0,0 @@
const langs = require('../../data/languages')
module.exports = function ({ tvg_language, languages = [] }) {
if (tvg_language) {
return tvg_language
.split(';')
.map(name => langs.find(l => l.name === name))
.filter(i => i)
}
return languages
}

View File

@ -1,10 +0,0 @@
module.exports = function ({ title }) {
return title
.trim()
.split(' ')
.map(s => s.trim())
.filter(s => {
return !/\[|\]/i.test(s) && !/\((\d+)P\)/i.test(s)
})
.join(' ')
}

View File

@ -1,22 +0,0 @@
const _ = require('lodash')
let regions = require('../../data/regions')
module.exports = function ({ countries }) {
if (!countries.length) return []
const output = []
regions = Object.values(regions)
countries.forEach(country => {
regions
.filter(region => region.country_codes.includes(country.code))
.forEach(found => {
output.push({
name: found.name,
code: found.code
})
})
})
return _.uniqBy(output, 'code')
}

View File

@ -1,9 +0,0 @@
module.exports = function ({ title, resolution = {} }) {
if (title) {
const [_, h] = title.match(/\((\d+)P\)/i) || [null, null]
return h ? { height: parseInt(h), width: null } : resolution
}
return resolution
}

View File

@ -1,13 +0,0 @@
const { file } = require('../../core')
const countries = require('../../data/countries')
module.exports = function ({ filepath }) {
if (filepath) {
const basename = file.basename(filepath)
const [_, code] = basename.match(/([a-z]{2})(|_.*)\.m3u/i) || [null, null]
return code ? countries[code.toUpperCase()] : null
}
return null
}

View File

@ -1,11 +0,0 @@
const statuses = require('../../data/statuses')
module.exports = function ({ title, status = {} }) {
if (title) {
const [_, label] = title.match(/\[(.*)\]/i) || [null, null]
return Object.values(statuses).find(s => s.label === label) || statuses['online']
}
return status
}

View File

@ -0,0 +1,3 @@
module.exports = function ({ user_agent }) {
return user_agent || null
}

Some files were not shown because too many files have changed in this diff