mirror of https://github.com/yt-dlp/yt-dlp.git

[ie] Avoid potentially confusing imports

commit 879a85aeb9 (parent 9f813e716b)
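Every hunk in this commit follows the same pattern: a `from module import name` import is replaced by a plain `import module`, so each call site reads as `module.name` and cannot be mistaken for a local variable or a same-named class. A minimal sketch of the ambiguity being avoided (the variable names are illustrative, not taken from the diff):

import datetime  # module import: the name `datetime` is unambiguously the module

# With `from datetime import datetime`, the name `datetime` would instead be the
# class, so `datetime.timezone` would raise AttributeError and `datetime.date`
# would resolve to the instance method rather than the `date` class.
now = datetime.datetime.now(datetime.timezone.utc)
today = datetime.date.today()
print(now.isoformat(), today.isoformat())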
DTubeIE:

@@ -1,5 +1,5 @@
 import json
-from socket import timeout
+import socket

 from .common import InfoExtractor
 from ..utils import (
@@ -55,7 +55,7 @@ class DTubeIE(InfoExtractor):
            try:
                self.to_screen('%s: Checking %s video format URL' % (video_id, format_id))
                self._downloader._opener.open(video_url, timeout=5).close()
-            except timeout:
+            except socket.timeout:
                self.to_screen(
                    '%s: %s URL is invalid, skipping' % (video_id, format_id))
                continue
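For DTubeIE, the exception handler now names the module, so `except socket.timeout:` clearly refers to a socket-level timeout rather than some local name. A self-contained sketch of the same kind of check (the helper name and the plain TCP probe are stand-ins; the extractor itself opens the URL through `self._downloader._opener`):

import socket

def host_reachable(host, port=443, timeout=5):
    """True if a TCP connection succeeds within `timeout` seconds, False on timeout."""
    try:
        # create_connection() raises socket.timeout (an alias of TimeoutError
        # since Python 3.10) once the deadline passes; other errors propagate.
        socket.create_connection((host, port), timeout=timeout).close()
    except socket.timeout:
        return False
    return True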
@@ -1,4 +1,3 @@
-from __future__ import unicode_literals
 from .common import InfoExtractor
 from ..utils import (
     traverse_obj,
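The only change in this file is dropping the Python 2 compatibility import; on Python 3, which is all yt-dlp supports, string literals are already text, so the line was a no-op:

# No __future__ import needed on Python 3: literals are already str (text).
assert isinstance('some literal', str)
assert not isinstance('some literal', bytes)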
NaverNowIE:

@@ -1,6 +1,6 @@
 import itertools
 import re
-from urllib.parse import urlparse, parse_qs
+import urllib.parse

 from .common import InfoExtractor
 from ..utils import (
@@ -381,7 +381,7 @@ class NaverNowIE(NaverBaseIE):

    def _real_extract(self, url):
        show_id = self._match_id(url)
-        qs = parse_qs(urlparse(url).query)
+        qs = urllib.parse.parse_qs(urllib.parse.urlparse(url).query)

        if not self._yes_playlist(show_id, qs.get('shareHightlight')):
            return self._extract_highlight(show_id, qs['shareHightlight'][0])
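In NaverNowIE the query-string helpers are now spelled with their module prefix. A small sketch of the same parsing (the helper name and example URL are made up; `shareHightlight` is the query parameter exactly as it appears in the diff):

import urllib.parse

def share_highlight_id(url):
    """Return the 'shareHightlight' query value from `url`, or None."""
    qs = urllib.parse.parse_qs(urllib.parse.urlparse(url).query)
    return qs.get('shareHightlight', [None])[0]

assert share_highlight_id('https://example.com/show?shareHightlight=42') == '42'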
NiconicoLiveIE:

@@ -4,8 +4,7 @@ import itertools
 import json
 import re
 import time
-
-from urllib.parse import urlparse
+import urllib.parse

 from .common import InfoExtractor, SearchInfoExtractor
 from ..networking import Request
@@ -946,7 +945,7 @@ class NiconicoLiveIE(InfoExtractor):
            'frontend_id': traverse_obj(embedded_data, ('site', 'frontendId')) or '9',
        })

-        hostname = remove_start(urlparse(urlh.url).hostname, 'sp.')
+        hostname = remove_start(urllib.parse.urlparse(urlh.url).hostname, 'sp.')
        latency = try_get(self._configuration_arg('latency'), lambda x: x[0])
        if latency not in self._KNOWN_LATENCY:
            latency = 'high'
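The Niconico change is the same rename, so the bare name `urlparse` no longer appears anywhere in the file. A rough equivalent of the hostname handling, with yt-dlp's `remove_start` helper inlined and a made-up URL:

import urllib.parse

def live_hostname(url):
    """Hostname of `url` with a leading 'sp.' prefix stripped."""
    hostname = urllib.parse.urlparse(url).hostname or ''
    # Inline stand-in for yt_dlp.utils.remove_start(hostname, 'sp.')
    return hostname[len('sp.'):] if hostname.startswith('sp.') else hostname

assert live_hostname('https://sp.live.example.com/watch/lv123') == 'live.example.com'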
PanoptoIE:

@@ -1,8 +1,8 @@
 import calendar
-import json
+import datetime
 import functools
-from datetime import datetime, timezone
-from random import random
+import json
+import random

 from .common import InfoExtractor
 from ..compat import (
@@ -243,7 +243,7 @@ class PanoptoIE(PanoptoBaseIE):
        invocation_id = delivery_info.get('InvocationId')
        stream_id = traverse_obj(delivery_info, ('Delivery', 'Streams', ..., 'PublicID'), get_all=False, expected_type=str)
        if invocation_id and stream_id and duration:
-            timestamp_str = f'/Date({calendar.timegm(datetime.now(timezone.utc).timetuple())}000)/'
+            timestamp_str = f'/Date({calendar.timegm(datetime.datetime.now(datetime.timezone.utc).timetuple())}000)/'
            data = {
                'streamRequests': [
                    {
@@ -415,7 +415,7 @@ class PanoptoIE(PanoptoBaseIE):
            'cast': traverse_obj(delivery, ('Contributors', ..., 'DisplayName'), expected_type=lambda x: x or None),
            'timestamp': session_start_time - 11640000000 if session_start_time else None,
            'duration': delivery.get('Duration'),
-            'thumbnail': base_url + f'/Services/FrameGrabber.svc/FrameRedirect?objectId={video_id}&mode=Delivery&random={random()}',
+            'thumbnail': base_url + f'/Services/FrameGrabber.svc/FrameRedirect?objectId={video_id}&mode=Delivery&random={random.random()}',
            'average_rating': delivery.get('AverageRating'),
            'chapters': self._extract_chapters(timestamps),
            'uploader': delivery.get('OwnerDisplayName') or None,
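Panopto is where the old imports were most confusing: the file wants the `datetime`, `json` and `random` modules, but `from datetime import datetime, timezone` and `from random import random` rebound the names `datetime` and `random` to a class and a function instead of their modules. A sketch of the two rewritten expressions as standalone helpers (the function names are invented; the URL shape and timestamp format are copied from the diff):

import calendar
import datetime
import random

def delivery_timestamp():
    """Current UTC time in the '/Date(<seconds>000)/' form used above."""
    now = datetime.datetime.now(datetime.timezone.utc)
    return f'/Date({calendar.timegm(now.timetuple())}000)/'

def frame_grabber_url(base_url, video_id):
    """Thumbnail URL with a random cache-busting query value."""
    return (f'{base_url}/Services/FrameGrabber.svc/FrameRedirect'
            f'?objectId={video_id}&mode=Delivery&random={random.random()}')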
Pr0grammIE:

@@ -1,6 +1,6 @@
+import datetime
 import json
-from datetime import date
-from urllib.parse import unquote
+import urllib.parse

 from .common import InfoExtractor
 from ..compat import functools
@@ -99,7 +99,7 @@ class Pr0grammIE(InfoExtractor):
        cookies = self._get_cookies(self.BASE_URL)
        if 'me' not in cookies:
            self._download_webpage(self.BASE_URL, None, 'Refreshing verification information')
-        if traverse_obj(cookies, ('me', {lambda x: x.value}, {unquote}, {json.loads}, 'verified')):
+        if traverse_obj(cookies, ('me', {lambda x: x.value}, {urllib.parse.unquote}, {json.loads}, 'verified')):
            flags |= 0b0110

        return flags
@@ -183,7 +183,7 @@ class Pr0grammIE(InfoExtractor):
                'like_count': ('up', {int}),
                'dislike_count': ('down', {int}),
                'upload_timestamp': ('created', {int}),
-                'upload_date': ('created', {int}, {date.fromtimestamp}, {lambda x: x.strftime('%Y%m%d')}),
+                'upload_date': ('created', {int}, {datetime.date.fromtimestamp}, {lambda x: x.strftime('%Y%m%d')}),
                'thumbnail': ('thumb', {lambda x: urljoin('https://thumb.pr0gramm.com', x)})
            }),
        }
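For Pr0grammIE, `datetime.date.fromtimestamp` and `urllib.parse.unquote` now spell out their modules inside the `traverse_obj` paths. Rough equivalents of the two call sites as plain functions (the helper names and sample cookie value are invented):

import datetime
import json
import urllib.parse

def cookie_is_verified(cookie_value):
    """Decode a URL-encoded JSON cookie payload and read its 'verified' flag."""
    return bool(json.loads(urllib.parse.unquote(cookie_value)).get('verified'))

def upload_date(created):
    """Format a Unix timestamp as YYYYMMDD (local time, as date.fromtimestamp uses)."""
    return datetime.date.fromtimestamp(created).strftime('%Y%m%d')

assert cookie_is_verified('%7B%22verified%22%3A%20true%7D')  # '{"verified": true}'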
RadioKapitalBaseIE:

@@ -1,18 +1,14 @@
-from .common import InfoExtractor
-from ..utils import (
-    clean_html,
-    traverse_obj,
-    unescapeHTML,
-)
-
 import itertools
-from urllib.parse import urlencode
+import urllib.parse
+
+from .common import InfoExtractor
+from ..utils import clean_html, traverse_obj, unescapeHTML


 class RadioKapitalBaseIE(InfoExtractor):
    def _call_api(self, resource, video_id, note='Downloading JSON metadata', qs={}):
        return self._download_json(
-            f'https://www.radiokapital.pl/wp-json/kapital/v1/{resource}?{urlencode(qs)}',
+            f'https://www.radiokapital.pl/wp-json/kapital/v1/{resource}?{urllib.parse.urlencode(qs)}',
            video_id, note=note)

    def _parse_episode(self, data):
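RadioKapital's import block is also reordered into the usual layout (stdlib imports first, then the package-relative ones), and `urlencode` becomes `urllib.parse.urlencode`. A sketch of the URL built in `_call_api` (endpoint path copied from the diff; the query values are examples):

import urllib.parse

def api_url(resource, qs=None):
    """WP-JSON endpoint for a resource, with an optional query dict."""
    query = urllib.parse.urlencode(qs or {})
    return f'https://www.radiokapital.pl/wp-json/kapital/v1/{resource}?{query}'

assert api_url('episodes', {'page': 2}) == \
    'https://www.radiokapital.pl/wp-json/kapital/v1/episodes?page=2'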
RokfinIE:

@@ -1,8 +1,8 @@
+import datetime
 import itertools
 import json
 import re
 import urllib.parse
-from datetime import datetime

 from .common import InfoExtractor, SearchInfoExtractor
 from ..utils import (
@@ -157,7 +157,7 @@ class RokfinIE(InfoExtractor):
            self.raise_login_required('This video is only available to premium users', True, method='cookies')
        elif scheduled:
            self.raise_no_formats(
-                f'Stream is offline; scheduled for {datetime.fromtimestamp(scheduled).strftime("%Y-%m-%d %H:%M:%S")}',
+                f'Stream is offline; scheduled for {datetime.datetime.fromtimestamp(scheduled).strftime("%Y-%m-%d %H:%M:%S")}',
                video_id=video_id, expected=True)

        uploader = traverse_obj(metadata, ('createdBy', 'username'), ('creator', 'username'))
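RokfinIE gets the same `datetime` rename; the scheduled-stream notice just formats a Unix timestamp in local time. A one-liner equivalent (the timestamp value is arbitrary):

import datetime

scheduled = 1700000000  # example Unix timestamp
message = ('Stream is offline; scheduled for '
           f'{datetime.datetime.fromtimestamp(scheduled).strftime("%Y-%m-%d %H:%M:%S")}')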
TenPlayIE:

@@ -1,7 +1,7 @@
 import base64
+import datetime
 import functools
 import itertools
-from datetime import datetime

 from .common import InfoExtractor
 from ..networking import HEADRequest
@@ -69,7 +69,7 @@ class TenPlayIE(InfoExtractor):
        username, password = self._get_login_info()
        if username is None or password is None:
            self.raise_login_required('Your 10play account\'s details must be provided with --username and --password.')
-        _timestamp = datetime.now().strftime('%Y%m%d000000')
+        _timestamp = datetime.datetime.now().strftime('%Y%m%d000000')
        _auth_header = base64.b64encode(_timestamp.encode('ascii')).decode('ascii')
        data = self._download_json('https://10play.com.au/api/user/auth', video_id, 'Getting bearer token', headers={
            'X-Network-Ten-Auth': _auth_header,
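TenPlayIE derives its auth header from the current date; with the module import the call reads as `datetime.datetime.now()`. The two lines from the diff work as-is outside the extractor (header name and value construction as shown above):

import base64
import datetime

_timestamp = datetime.datetime.now().strftime('%Y%m%d000000')
_auth_header = base64.b64encode(_timestamp.encode('ascii')).decode('ascii')
headers = {'X-Network-Ten-Auth': _auth_header}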