Compare commits
13 Commits
2015.02.02 ... 2015.02.02
Author | SHA1 | Date
---|---|---
 | 869b4aeff4 |
 | cc9ca3ba6e |
 | ea71034bd3 |
 | 9fffd0469f |
 | ae7773942e |
 | 469a64cebf |
 | aae3fdcfae |
 | 6a66904f8e |
 | 78271e3319 |
 | 92bf0bcdf8 |
 | 1283204917 |
 | 6789defea9 |
 | e77d2975af |
README.md (10 changed lines)
@@ -368,11 +368,11 @@ which means you can modify it, redistribute it or use it however you like.
     --add-metadata                   write metadata to the video file
     --xattrs                         write metadata to the video file's xattrs
                                      (using dublin core and xdg standards)
-    --fixup POLICY                   (experimental) Automatically correct known
-                                     faults of the file. One of never (do
-                                     nothing), warn (only emit a warning),
-                                     detect_or_warn(check whether we can do
-                                     anything about it, warn otherwise
+    --fixup POLICY                   Automatically correct known faults of the
+                                     file. One of never (do nothing), warn (only
+                                     emit a warning), detect_or_warn(the
+                                     default; fix file if we can, warn
+                                     otherwise)
     --prefer-avconv                  Prefer avconv over ffmpeg for running the
                                      postprocessors (default)
     --prefer-ffmpeg                  Prefer ffmpeg over avconv for running the
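The reworded help text above describes the three fixup policies (never, warn, detect_or_warn), with detect_or_warn as the default. For embedders, the same policy can be selected through the `fixup` key of the YoutubeDL options dict; the snippet below is a minimal sketch of that, and the URL is only an example, not part of this changeset.

```python
from youtube_dl import YoutubeDL

ydl_opts = {
    # mirrors --fixup POLICY: 'detect_or_warn' fixes the file when possible
    # and warns otherwise, 'warn' only warns, 'never' does nothing
    'fixup': 'detect_or_warn',
}
with YoutubeDL(ydl_opts) as ydl:
    ydl.download(['http://www.historicfilms.com/tapes/4728'])
```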
youtube_dl/downloader/external.py

@@ -45,6 +45,12 @@ class ExternalFD(FileDownloader):
     def supports(cls, info_dict):
         return info_dict['protocol'] in ('http', 'https', 'ftp', 'ftps')
 
+    def _source_address(self, command_option):
+        source_address = self.params.get('source_address')
+        if source_address is None:
+            return []
+        return [command_option, source_address]
+
     def _call_downloader(self, tmpfilename, info_dict):
         """ Either overwrite this or implement _make_cmd """
         cmd = self._make_cmd(tmpfilename, info_dict)
@@ -72,6 +78,7 @@ class CurlFD(ExternalFD):
         cmd = [self.exe, '-o', tmpfilename]
         for key, val in info_dict['http_headers'].items():
             cmd += ['--header', '%s: %s' % (key, val)]
+        cmd += self._source_address('--interface')
         cmd += ['--', info_dict['url']]
         return cmd
 
@@ -81,6 +88,7 @@ class WgetFD(ExternalFD):
         cmd = [self.exe, '-O', tmpfilename, '-nv', '--no-cookies']
         for key, val in info_dict['http_headers'].items():
             cmd += ['--header', '%s: %s' % (key, val)]
+        cmd += self._source_address('--bind-address')
         cmd += ['--', info_dict['url']]
         return cmd
 
@@ -96,6 +104,7 @@ class Aria2cFD(ExternalFD):
         cmd += ['--out', os.path.basename(tmpfilename)]
         for key, val in info_dict['http_headers'].items():
             cmd += ['--header', '%s: %s' % (key, val)]
+        cmd += self._source_address('--interface')
         cmd += ['--', info_dict['url']]
         return cmd
 
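The three downloader hunks above all route the user's --source-address value through the new `_source_address` helper, which turns the stored option into the right command-line pair for each external tool, or into nothing when the option is unset. A minimal standalone sketch of that behaviour follows; the class and dicts here are simplified stand-ins, not youtube-dl code.

```python
# Standalone illustration of the _source_address helper added above.
class SourceAddressSketch(object):
    def __init__(self, params):
        self.params = params  # youtube-dl style options dict

    def _source_address(self, command_option):
        source_address = self.params.get('source_address')
        if source_address is None:
            return []
        return [command_option, source_address]


print(SourceAddressSketch({'source_address': '192.0.2.10'})._source_address('--interface'))
# ['--interface', '192.0.2.10']
print(SourceAddressSketch({})._source_address('--bind-address'))
# []
```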
youtube_dl/downloader/http.py

@@ -3,6 +3,9 @@ from __future__ import unicode_literals
 import os
 import time
 
+from socket import error as SocketError
+import errno
+
 from .common import FileDownloader
 from ..compat import (
     compat_urllib_request,
@@ -99,6 +102,11 @@ class HttpFD(FileDownloader):
                             resume_len = 0
                             open_mode = 'wb'
                             break
+            except SocketError as e:
+                if e.errno != errno.ECONNRESET:
+                    # Connection reset is no problem, just retry
+                    raise
+
             # Retry
             count += 1
             if count <= retries:
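The new `except SocketError` branch treats a connection reset as a retryable event: the exception is swallowed so the surrounding retry loop runs again, while any other socket error still propagates. A minimal standalone sketch of that pattern, assuming nothing about youtube-dl's internals (the function and names below are illustrative only):

```python
import errno
from socket import error as SocketError


def fetch_with_retries(do_request, retries=3):
    """Call do_request(), retrying only when the connection was reset."""
    count = 0
    while count <= retries:
        try:
            return do_request()
        except SocketError as e:
            if e.errno != errno.ECONNRESET:
                raise  # anything other than a connection reset is fatal
            # connection reset: fall through and try again
        count += 1
    raise SocketError(errno.ECONNRESET, 'connection kept resetting, giving up')
```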
youtube_dl/extractor/__init__.py

@@ -182,6 +182,7 @@ from .heise import HeiseIE
 from .hellporno import HellPornoIE
 from .helsinki import HelsinkiIE
 from .hentaistigma import HentaiStigmaIE
+from .historicfilms import HistoricFilmsIE
 from .hitbox import HitboxIE, HitboxLiveIE
 from .hornbunny import HornBunnyIE
 from .hostingbulk import HostingBulkIE
youtube_dl/extractor/drtv.py

@@ -25,9 +25,15 @@ class DRTVIE(SubtitlesInfoExtractor):
     def _real_extract(self, url):
         video_id = self._match_id(url)
 
-        programcard = self._download_json(
-            'http://www.dr.dk/mu/programcard/expanded/%s' % video_id, video_id, 'Downloading video JSON')
+        webpage = self._download_webpage(url, video_id)
+
+        video_id = self._search_regex(
+            r'data-(?:material-identifier|episode-slug)="([^"]+)"',
+            webpage, 'video id')
+
+        programcard = self._download_json(
+            'http://www.dr.dk/mu/programcard/expanded/%s' % video_id,
+            video_id, 'Downloading video JSON')
 
         data = programcard['Data'][0]
 
         title = data['Title']
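The DR TV extractor no longer trusts the id taken from the URL: it fetches the page first and pulls the real programcard id out of a data-material-identifier or data-episode-slug attribute. As a quick illustration of what that regex captures (the HTML snippets below are made-up examples, not taken from dr.dk):

```python
import re

pattern = r'data-(?:material-identifier|episode-slug)="([^"]+)"'

# hypothetical markup showing the two attribute forms the pattern accepts
samples = [
    '<div class="player" data-material-identifier="some-programcard-id">',
    '<section data-episode-slug="some-episode-slug">',
]
for html in samples:
    print(re.search(pattern, html).group(1))
# some-programcard-id
# some-episode-slug
```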
youtube_dl/extractor/historicfilms.py (new file, 46 lines)

@@ -0,0 +1,46 @@
+from __future__ import unicode_literals
+
+from .common import InfoExtractor
+from ..utils import parse_duration
+
+
+class HistoricFilmsIE(InfoExtractor):
+    _VALID_URL = r'https?://(?:www\.)?historicfilms\.com/(?:tapes/|play)(?P<id>\d+)'
+    _TEST = {
+        'url': 'http://www.historicfilms.com/tapes/4728',
+        'md5': 'd4a437aec45d8d796a38a215db064e9a',
+        'info_dict': {
+            'id': '4728',
+            'ext': 'mov',
+            'title': 'Historic Films: GP-7',
+            'description': 'md5:1a86a0f3ac54024e419aba97210d959a',
+            'thumbnail': 're:^https?://.*\.jpg$',
+            'duration': 2096,
+        },
+    }
+
+    def _real_extract(self, url):
+        video_id = self._match_id(url)
+
+        webpage = self._download_webpage(url, video_id)
+
+        tape_id = self._search_regex(
+            r'class="tapeId">([^<]+)<', webpage, 'tape id')
+
+        title = self._og_search_title(webpage)
+        description = self._og_search_description(webpage)
+        thumbnail = self._html_search_meta(
+            'thumbnailUrl', webpage, 'thumbnails') or self._og_search_thumbnail(webpage)
+        duration = parse_duration(self._html_search_meta(
+            'duration', webpage, 'duration'))
+
+        video_url = 'http://www.historicfilms.com/video/%s_%s_web.mov' % (tape_id, video_id)
+
+        return {
+            'id': video_id,
+            'url': video_url,
+            'title': title,
+            'description': description,
+            'thumbnail': thumbnail,
+            'duration': duration,
+        }
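Once the import in extractor/__init__.py (above) registers HistoricFilmsIE, the extractor is picked up automatically for matching URLs. A minimal sketch of exercising it through the embedding API, using the URL from the _TEST block; the options and the download=False call only extract metadata, they are not part of this changeset:

```python
import youtube_dl

ydl = youtube_dl.YoutubeDL({'quiet': True})
# download=False: run only the extractor and return the info dict
info = ydl.extract_info('http://www.historicfilms.com/tapes/4728', download=False)
print(info['id'], info['title'], info['duration'])
```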
youtube_dl/options.py

@@ -698,10 +698,9 @@ def parseOpts(overrideArguments=None):
     postproc.add_option(
         '--fixup',
         metavar='POLICY', dest='fixup', default='detect_or_warn',
-        help='(experimental) Automatically correct known faults of the file. '
+        help='Automatically correct known faults of the file. '
              'One of never (do nothing), warn (only emit a warning), '
-             'detect_or_warn(check whether we can do anything about it, warn '
-             'otherwise')
+             'detect_or_warn(the default; fix file if we can, warn otherwise)')
     postproc.add_option(
         '--prefer-avconv',
         action='store_false', dest='prefer_ffmpeg',
youtube_dl/version.py

@@ -1,3 +1,3 @@
 from __future__ import unicode_literals
 
-__version__ = '2015.02.02'
+__version__ = '2015.02.02.1'