youtube_dl/extractor/streetvoice.py
# coding: utf-8
from __future__ import unicode_literals

from .common import InfoExtractor
from ..utils import (
    int_or_none,
    parse_iso8601,
    str_or_none,
    strip_or_none,
    try_get,
    urljoin,
)


class StreetVoiceIE(InfoExtractor):
    _VALID_URL = r'https?://(?:.+?\.)?streetvoice\.com/[^/]+/songs/(?P<id>[0-9]+)'
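    # The optional subdomain group also accepts regional hosts such as
    # tw.streetvoice.com (exercised by the second test below).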
    _TESTS = [{
        'url': 'https://streetvoice.com/skippylu/songs/123688/',
        'md5': '0eb535970629a5195685355f3ed60bfd',
        'info_dict': {
            'id': '123688',
            'ext': 'mp3',
            'title': '流浪',
            'description': 'md5:8eb0bfcc9dcd8aa82bd6efca66e3fea6',
            'thumbnail': r're:^https?://.*\.jpg',
            'duration': 270,
            'upload_date': '20100923',
            'uploader': 'Crispy脆樂團',
            'uploader_id': '627810',
            'uploader_url': 're:^https?://streetvoice.com/skippylu/',
            'timestamp': 1285261661,
            'view_count': int,
            'like_count': int,
            'comment_count': int,
            'repost_count': int,
            'track': '流浪',
            'track_id': '123688',
            'album': '2010',
        }
    }, {
        'url': 'http://tw.streetvoice.com/skippylu/songs/94440/',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        song_id = self._match_id(url)
        base_url = 'https://streetvoice.com/api/v4/song/%s/' % song_id
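        # Fetch the song metadata from the public v4 API, requesting only the
        # fields that are actually used below.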
        song = self._download_json(base_url, song_id, query={
            'fields': 'album,comments_count,created_at,id,image,length,likes_count,name,nickname,plays_count,profile,share_count,synopsis,user,username',
        })
        title = song['name']

        formats = []
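        # Each format variant (HLS, progressive HTTP, original upload) sits
        # behind its own sub-endpoint; data=b'' forces a POST request, and a
        # variant whose endpoint returns no file URL is simply skipped.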
        for suffix, format_id in [('hls/file', 'hls'), ('file', 'http'), ('file/original', 'original')]:
            f_url = (self._download_json(
                base_url + suffix + '/', song_id,
                'Downloading %s format URL' % format_id,
                data=b'', fatal=False) or {}).get('file')
            if not f_url:
                continue
            f = {
                'ext': 'mp3',
                'format_id': format_id,
                'url': f_url,
                'vcodec': 'none',
            }
            if format_id == 'hls':
                f['protocol'] = 'm3u8_native'
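            # The audio bitrate, when present, is embedded in the file URL
            # (e.g. a name containing '.mp3.128k' would yield abr/tbr of 128).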
            abr = self._search_regex(r'\.mp3\.(\d+)k', f_url, 'bitrate', default=None)
            if abr:
                abr = int(abr)
                f.update({
                    'abr': abr,
                    'tbr': abr,
                })
            formats.append(f)

        user = song.get('user') or {}
        username = user.get('username')
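        # Counters in the API response are named '<key>_count'
        # (plays_count, likes_count, comments_count, share_count).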
        get_count = lambda x: int_or_none(song.get(x + '_count'))

        return {
            'id': song_id,
            'formats': formats,
            'title': title,
            'description': strip_or_none(song.get('synopsis')),
            'thumbnail': song.get('image'),
            'duration': int_or_none(song.get('length')),
            'timestamp': parse_iso8601(song.get('created_at')),
            'uploader': try_get(user, lambda x: x['profile']['nickname']),
            'uploader_id': str_or_none(user.get('id')),
            'uploader_url': urljoin(url, '/%s/' % username) if username else None,
            'view_count': get_count('plays'),
            'like_count': get_count('likes'),
            'comment_count': get_count('comments'),
            'repost_count': get_count('share'),
            'track': title,
            'track_id': song_id,
            'album': try_get(song, lambda x: x['album']['name']),
        }
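

# Usage sketch (run as a separate script), assuming youtube-dl is installed;
# YoutubeDL picks this extractor up automatically for matching URLs:
#
#     import youtube_dl
#
#     with youtube_dl.YoutubeDL({'skip_download': True}) as ydl:
#         info = ydl.extract_info(
#             'https://streetvoice.com/skippylu/songs/123688/', download=False)
#         print(info['title'], [f['format_id'] for f in info['formats']])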