# -*- coding: utf-8 -*-
import isodate
import youtube_dl
import logging
import wx
import config
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
from update.utils import seconds_to_string
from .import base

# NOTE(review): hard-coded API credential checked into source — should be
# loaded from config or a secret store and this key revoked.
DEVELOPER_KEY = "AIzaSyCU_hvZJEjLlAGAnlscquKEkE8l0lVOfn0"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"

# Module-level logger for this extractor.
log = logging.getLogger("extractors.youtube.com")

|
class interface(object):
    """ Search and extraction service for YouTube.

    Searches are performed against the YouTube data API; direct video or
    playlist URLs are resolved with youtube_dl. Found songs are stored in
    self.results as base.song objects. """
    name = "YouTube"
    enabled = config.app["services"]["youtube"].get("enabled")

    def __init__(self):
        # Songs found by the most recent search* call.
        self.results = []
        # Downloaded audio needs transcoding before it is saved as mp3.
        self.needs_transcode = True
        log.debug("started extraction service for {0}".format(self.name,))
        self.file_extension = "mp3"

    def search(self, text, page=1):
        """ Search YouTube for videos matching text and fill self.results.

        If text is an http(s) URL it is delegated to search_from_url.
        Raises ValueError when text is empty or None. """
        if text is None or text == "":
            raise ValueError("Text must be passed and should not be blank.")
        if text.startswith("https") or text.startswith("http"):
            return self.search_from_url(text)
        # Renamed from 'type' to avoid shadowing the builtin.
        result_type = "video"
        max_results = config.app["services"]["youtube"]["max_results"]
        log.debug("Retrieving data from Youtube...")
        youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=DEVELOPER_KEY)
        search_response = youtube.search().list(q=text, part="id,snippet", maxResults=max_results, type=result_type).execute()
        self.results = []
        ids = []
        for search_result in search_response.get("items", []):
            if search_result["id"]["kind"] == "youtube#video":
                s = base.song(self)
                s.title = search_result["snippet"]["title"]
                ids.append(search_result["id"]["videoId"])
                s.url = "https://www.youtube.com/watch?v="+search_result["id"]["videoId"]
                self.results.append(s)
        # Batch-fetch durations for every found video. Bug fix: maxResults
        # was 1 here (vs 50 in search_from_playlist), which could truncate
        # the durations list and raise IndexError in the loop below.
        ssr = youtube.videos().list(id=",".join(ids), part="contentDetails", maxResults=max_results).execute()
        for i in range(len(self.results)):
            self.results[i].duration = seconds_to_string(isodate.parse_duration(ssr["items"][i]["contentDetails"]["duration"]).total_seconds())
        log.debug("{0} results found.".format(len(self.results)))

    def search_from_url(self, url):
        """ Fill self.results from a direct video (or playlist) URL via youtube_dl. """
        log.debug("Getting download URL for {0}".format(url,))
        if "playlist?list=" in url:
            return self.search_from_playlist(url)
        ydl = youtube_dl.YoutubeDL({'quiet': True, 'no_warnings': True, 'logger': log, 'prefer-free-formats': True, 'format': 'bestaudio', 'outtmpl': u'%(id)s%(ext)s'})
        with ydl:
            result = ydl.extract_info(url, download=False)
        if 'entries' in result:
            videos = result['entries']
        else:
            videos = [result]
        # Bug fix: results were never reset here, so repeated URL searches
        # accumulated — inconsistent with search() and search_from_playlist().
        self.results = []
        for video in videos:
            # Bug fix: was baseFile.song — a NameError; the module is imported as 'base'.
            s = base.song(self)
            s.title = video["title"]
            s.url = video["webpage_url"] # Cannot use direct URL here cause Youtube URLS expire after a minute.
            s.duration = seconds_to_string(video["duration"])
            self.results.append(s)
        log.debug("{0} results found.".format(len(self.results)))

    def search_from_playlist(self, url):
        """ Fill self.results with the public videos of a playlist URL. """
        # Renamed from 'id' to avoid shadowing the builtin.
        # NOTE(review): naive parse — breaks if the URL carries extra params; verify callers.
        playlist_id = url.split("=")[1]
        max_results = 50  # the YouTube API caps page size at 50
        log.debug("Retrieving data from Youtube...")
        youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=DEVELOPER_KEY)
        search_response = youtube.playlistItems().list(playlistId=playlist_id, part="id, status, snippet", maxResults=max_results).execute()
        self.results = []
        ids = []
        for search_result in search_response.get("items", []):
            # Private/unlisted entries expose no usable metadata; skip them.
            if search_result["status"]["privacyStatus"] != "public":
                continue
            # Bug fix: was baseFile.song — a NameError; the module is imported as 'base'.
            s = base.song(self)
            s.title = search_result["snippet"]["title"]
            ids.append(search_result["snippet"]["resourceId"]["videoId"])
            s.url = "https://www.youtube.com/watch?v="+search_result["snippet"]["resourceId"]["videoId"]
            self.results.append(s)
        # Batch-fetch durations for every public video found above.
        ssr = youtube.videos().list(id=",".join(ids), part="contentDetails", maxResults=50).execute()
        for i in range(len(self.results)):
            self.results[i].duration = seconds_to_string(isodate.parse_duration(ssr["items"][i]["contentDetails"]["duration"]).total_seconds())
        log.debug("{0} results found.".format(len(self.results)))

    def get_download_url(self, url):
        """ Resolve the direct (short-lived) audio stream URL for a video. """
        log.debug("Getting download URL for {0}".format(url,))
        ydl = youtube_dl.YoutubeDL({'quiet': True, 'no_warnings': True, 'logger': log, 'format': 'bestaudio/best', 'outtmpl': u'%(id)s%(ext)s'})
        with ydl:
            result = ydl.extract_info(url, download=False)
        if 'entries' in result:
            video = result['entries'][0]
        else:
            video = result
        # From here we should extract the first format so it will contain audio only.
        log.debug("Download URL: {0}".format(video["formats"][0]["url"],))
        return video["formats"][0]["url"]

    def format_track(self, item):
        """ Render a result as the display string "<title> <duration>". """
        return "{0} {1}".format(item.title, item.duration)

|
class settings(base.baseSettings):
    """ Configuration panel for the YouTube service. """
    name = _("Youtube Settings")
    config_section = "youtube"

    def __init__(self, parent):
        super(settings, self).__init__(parent=parent)
        sizer = wx.BoxSizer(wx.VERTICAL)
        # Master switch; toggling it enables/disables every other control.
        self.enabled = wx.CheckBox(self, wx.NewId(), _("Enable this service"))
        self.enabled.Bind(wx.EVT_CHECKBOX, self.on_enabled)
        self.map.append(("enabled", self.enabled))
        sizer.Add(self.enabled, 0, wx.ALL, 5)
        max_results_label = wx.StaticText(self, wx.NewId(), _("Max results per page"))
        self.max_results = wx.SpinCtrl(self, wx.NewId())
        self.max_results.SetRange(1, 50)  # the YouTube API caps page size at 50
        max_results_sizer = wx.BoxSizer(wx.HORIZONTAL)
        max_results_sizer.Add(max_results_label, 0, wx.ALL, 5)
        max_results_sizer.Add(self.max_results, 0, wx.ALL, 5)
        # Bug fix: the row sizer was built but never added to the panel's
        # main sizer, so the label and spin control were never laid out.
        sizer.Add(max_results_sizer, 0, wx.ALL, 5)
        self.map.append(("max_results", self.max_results))
        self.transcode = wx.CheckBox(self, wx.NewId(), _("Enable transcode when downloading"))
        self.map.append(("transcode", self.transcode))
        sizer.Add(self.transcode, 0, wx.ALL, 5)
        self.SetSizer(sizer)

    def on_enabled(self, *args, **kwargs):
        """ Mirror the master checkbox onto every other mapped control. """
        state = self.enabled.GetValue()
        for key, control in self.map:
            if control is not self.enabled:
                control.Enable(state)