From 8efae823020655171f44811cd05b971ff05ca177 Mon Sep 17 00:00:00 2001 From: pluja Date: Sat, 10 Oct 2020 17:26:28 +0200 Subject: [PATCH] First update, use youtube-local adapted 'API' --- app/routes.py | 575 ++++++++-------- app/templates/_video_comment.html | 5 +- app/templates/video.html | 59 +- youtube/channel.py | 281 ++++++++ youtube/comments.py | 145 ++++ youtube/opensearch.xml | 11 + youtube/playlist.py | 123 ++++ youtube/proto.py | 129 ++++ youtube/search.py | 105 +++ youtube/util.py | 397 +++++++++++ youtube/utils.py | 61 ++ youtube/watch.py | 246 +++++++ youtube/yt_data_extract/__init__.py | 12 + youtube/yt_data_extract/common.py | 470 +++++++++++++ youtube/yt_data_extract/everything_else.py | 281 ++++++++ youtube/yt_data_extract/watch_extraction.py | 689 ++++++++++++++++++++ youtube_data/channels.py | 2 +- youtube_data/comments.py | 130 ---- youtube_data/search.py | 4 +- youtube_data/utils.py | 12 - youtube_data/videos.py | 281 -------- 21 files changed, 3296 insertions(+), 722 deletions(-) create mode 100644 youtube/channel.py create mode 100644 youtube/comments.py create mode 100644 youtube/opensearch.xml create mode 100644 youtube/playlist.py create mode 100644 youtube/proto.py create mode 100644 youtube/search.py create mode 100644 youtube/util.py create mode 100644 youtube/utils.py create mode 100644 youtube/watch.py create mode 100644 youtube/yt_data_extract/__init__.py create mode 100644 youtube/yt_data_extract/common.py create mode 100644 youtube/yt_data_extract/everything_else.py create mode 100644 youtube/yt_data_extract/watch_extraction.py delete mode 100644 youtube_data/comments.py delete mode 100644 youtube_data/utils.py delete mode 100644 youtube_data/videos.py diff --git a/app/routes.py b/app/routes.py index 3075aef..9e26eaf 100644 --- a/app/routes.py +++ b/app/routes.py @@ -1,36 +1,38 @@ -from flask import render_template, flash, redirect, url_for, request, send_from_directory, Markup -from app.forms import LoginForm, RegistrationForm, EmptyForm, SearchForm, ChannelForm -from app.models import User, twitterPost, ytPost, Post, youtubeFollow, twitterFollow -from flask_login import login_user, logout_user, current_user, login_required -from flask import Flask, Response, stream_with_context -from requests_futures.sessions import FuturesSession -from werkzeug.datastructures import Headers + +import datetime +import glob +import json +import math +import os +import random +import re +import time +import urllib from concurrent.futures import as_completed -from werkzeug.utils import secure_filename -from youtube_search import YoutubeSearch -from werkzeug.urls import url_parse -from youtube_dl import YoutubeDL -from flask_caching import Cache -from numerize import numerize -from bs4 import BeautifulSoup -from xml.dom import minidom -from app import app, db -from re import findall -import random, string -import time, datetime + +import bleach import feedparser import requests -import bleach -import urllib -import math -import json -import glob -import re -import os +from bs4 import BeautifulSoup +from flask import Response +from flask import render_template, flash, redirect, url_for, request, send_from_directory, Markup +from flask_caching import Cache +from flask_login import login_user, logout_user, current_user, login_required +from numerize import numerize +from requests_futures.sessions import FuturesSession +from werkzeug.datastructures import Headers +from werkzeug.urls import url_parse +from werkzeug.utils import secure_filename +from youtube_search import 
YoutubeSearch + +from app import app, db +from app.forms import LoginForm, RegistrationForm, EmptyForm, SearchForm, ChannelForm +from app.models import User, twitterPost, ytPost, Post, youtubeFollow, twitterFollow +from youtube import comments, utils +from youtube import watch as ytwatch ######################################### -from youtube_data import videos as ytvids from youtube_data import search as yts -from youtube_data import comments as ytcomments + ######################################### cache = Cache(config={'CACHE_TYPE': 'simple'}) @@ -42,9 +44,10 @@ config = json.load(open('yotter-config.json')) ########################## #### Config variables #### ########################## -NITTERINSTANCE = config['nitterInstance'] # Must be https://.../ +NITTERINSTANCE = config['nitterInstance'] # Must be https://.../ YOUTUBERSS = "https://www.youtube.com/feeds/videos.xml?channel_id=" + ########################## #### Global variables #### ########################## @@ -58,6 +61,7 @@ def before_request(): current_user.set_last_seen() db.session.commit() + @app.route('/') @app.route('/index') @login_required @@ -65,6 +69,7 @@ def before_request(): def index(): return render_template('home.html', config=config) + @app.route('/twitter') @app.route('/twitter/') @login_required @@ -77,21 +82,21 @@ def twitter(page=0): posts = [] cache_file = glob.glob("app/cache/{}_*".format(current_user.username)) - if(len(cache_file) > 0): - time_diff = round(time.time()-os.path.getmtime(cache_file[0])) + if (len(cache_file) > 0): + time_diff = round(time.time() - os.path.getmtime(cache_file[0])) else: time_diff = 999 # If cache file is more than 1 minute old if page == 0 and time_diff > 60: if cache_file: for f in cache_file: - os.remove(f) + os.remove(f) feed = getFeed(followingList) cache_file = "{u}_{d}.json".format(u=current_user.username, d=time.strftime("%Y%m%d-%H%M%S")) with open("app/cache/{}".format(cache_file), 'w') as fp: json.dump(feed, fp) # Else, refresh feed - else: + else: try: cache_file = glob.glob("app/cache/{}*".format(current_user.username))[0] with open(cache_file, 'r') as fp: @@ -106,14 +111,14 @@ def twitter(page=0): posts.sort(key=lambda x: datetime.datetime.strptime(x['timeStamp'], '%d/%m/%Y %H:%M:%S'), reverse=True) # Items range per page - page_items = page*16 - offset = page_items+16 + page_items = page * 16 + offset = page_items + 16 # Pagination logic - init_page = page-3 + init_page = page - 3 if init_page < 0: init_page = 0 - total_pages = page+5 - max_pages = int(math.ceil(len(posts)/10)) # Total number of pages. + total_pages = page + 5 + max_pages = int(math.ceil(len(posts) / 10)) # Total number of pages. 
if total_pages > max_pages: total_pages = max_pages @@ -127,7 +132,10 @@ def twitter(page=0): profilePic = avatarPath else: profilePic = posts[0]['profilePic'] - return render_template('twitter.html', title='Yotter | Twitter', posts=posts, avatar=avatarPath, profilePic = profilePic, followedCount=followCount, form=form, config=config, pages=total_pages, init_page=init_page, actual_page=page) + return render_template('twitter.html', title='Yotter | Twitter', posts=posts, avatar=avatarPath, + profilePic=profilePic, followedCount=followCount, form=form, config=config, + pages=total_pages, init_page=init_page, actual_page=page) + @app.route('/savePost/', methods=['POST']) @login_required @@ -135,13 +143,15 @@ def savePost(url): savedUrl = url.replace('~', '/') r = requests.get(savedUrl) html = BeautifulSoup(str(r.content), "lxml") - post = html.body.find('div', attrs={'class':'main-tweet'}) + post = html.body.find('div', attrs={'class': 'main-tweet'}) newPost = Post() newPost.url = savedUrl - newPost.username = post.find('a','username').text.replace("@","") - newPost.body = post.find_all('div', attrs={'class':'tweet-content'})[0].text.encode('latin1').decode('unicode_escape').encode('latin1').decode('utf8') - newPost.timestamp = post.find_all('p', attrs={'class':'tweet-published'})[0].text.encode('latin1').decode('unicode_escape').encode('latin1').decode('utf8') + newPost.username = post.find('a', 'username').text.replace("@", "") + newPost.body = post.find_all('div', attrs={'class': 'tweet-content'})[0].text.encode('latin1').decode( + 'unicode_escape').encode('latin1').decode('utf8') + newPost.timestamp = post.find_all('p', attrs={'class': 'tweet-published'})[0].text.encode('latin1').decode( + 'unicode_escape').encode('latin1').decode('utf8') newPost.user_id = current_user.id try: db.session.add(newPost) @@ -150,12 +160,14 @@ def savePost(url): flash("Post could not be saved. Either it was already saved or there was an error.") return redirect(request.referrer) + @app.route('/saved') @login_required def saved(): savedPosts = current_user.saved_posts().all() return render_template('saved.html', title='Saved', savedPosts=savedPosts, config=config) + @app.route('/deleteSaved/', methods=['POST']) @login_required def deleteSaved(id): @@ -164,6 +176,7 @@ def deleteSaved(id): db.session.commit() return redirect(url_for('saved')) + @app.route('/follow/', methods=['POST']) @login_required def follow(username): @@ -173,6 +186,7 @@ def follow(username): flash("{} followed!".format(username)) return redirect(request.referrer) + def followTwitterAccount(username): if isTwitterUser(username): if not current_user.is_following_tw(username): @@ -190,6 +204,7 @@ def followTwitterAccount(username): flash("Something went wrong... try again") return False + @app.route('/unfollow/', methods=['POST']) @login_required def unfollow(username): @@ -199,6 +214,7 @@ def unfollow(username): flash("{} unfollowed!".format(username)) return redirect(request.referrer) + def twUnfollow(username): try: user = twitterFollow.query.filter_by(username=username).first() @@ -208,13 +224,15 @@ def twUnfollow(username): flash("There was an error unfollowing the user. 
Try again.") return redirect(request.referrer) + @app.route('/following') @login_required def following(): form = EmptyForm() followCount = len(current_user.twitter_following_list()) accounts = current_user.twitter_following_list() - return render_template('following.html', accounts = accounts, count = followCount, form = form, config=config) + return render_template('following.html', accounts=accounts, count=followCount, form=form, config=config) + @app.route('/search', methods=['GET', 'POST']) @login_required @@ -224,26 +242,27 @@ def search(): user = form.username.data results = twitterUserSearch(user) if results: - return render_template('search.html', form = form, results = results, config=config) + return render_template('search.html', form=form, results=results, config=config) else: flash("User {} not found...".format(user)) return redirect(request.referrer) else: - return render_template('search.html', form = form, config=config) + return render_template('search.html', form=form, config=config) + @app.route('/u/') @app.route('/') @login_required def u(username): if username == "favicon.ico": - return redirect(url_for('static',filename='favicons/favicon.ico')) - form = EmptyForm() - avatarPath = "img/avatars/{}.png".format(str(random.randint(1,12))) + return redirect(url_for('static', filename='favicons/favicon.ico')) + form = EmptyForm() + avatarPath = "img/avatars/{}.png".format(str(random.randint(1, 12))) user = getTwitterUserInfo(username) if not user: flash("This user is not on Twitter.") return redirect(request.referrer) - + posts = [] posts.extend(getPosts(username)) if not posts: @@ -251,6 +270,7 @@ def u(username): return render_template('user.html', posts=posts, user=user, form=form, config=config) + ######################### #### Youtube Logic ###### ######################### @@ -264,7 +284,9 @@ def youtube(): if videos: videos.sort(key=lambda x: x.date, reverse=True) print("--- {} seconds fetching youtube feed---".format(time.time() - start_time)) - return render_template('youtube.html', title="Yotter | Youtube", videos=videos, followCount=followCount, config=config) + return render_template('youtube.html', title="Yotter | Youtube", videos=videos, followCount=followCount, + config=config) + @app.route('/ytfollowing', methods=['GET', 'POST']) @login_required @@ -272,8 +294,9 @@ def ytfollowing(): form = EmptyForm() channelList = current_user.youtube_following_list() channelCount = len(channelList) - - return render_template('ytfollowing.html', form=form, channelList=channelList, channelCount=channelCount, config=config) + + return render_template('ytfollowing.html', form=form, channelList=channelList, channelCount=channelCount, + config=config) @app.route('/ytsearch', methods=['GET', 'POST']) @@ -294,24 +317,28 @@ def ytsearch(): if query: autocorrect = 1 - filters = {"time":0, "type":0, "duration":0} + filters = {"time": 0, "type": 0, "duration": 0} results = yts.search_by_terms(query, page, autocorrect, sort, filters) - next_page = "/ytsearch?q={q}&s={s}&p={p}".format(q=query, s=sort, p=int(page)+1) + next_page = "/ytsearch?q={q}&s={s}&p={p}".format(q=query, s=sort, p=int(page) + 1) if int(page) == 1: prev_page = "/ytsearch?q={q}&s={s}&p={p}".format(q=query, s=sort, p=1) else: - prev_page = "/ytsearch?q={q}&s={s}&p={p}".format(q=query, s=sort, p=int(page)-1) + prev_page = "/ytsearch?q={q}&s={s}&p={p}".format(q=query, s=sort, p=int(page) - 1) for channel in results['channels']: if config['nginxVideoStream']: channel['thumbnail'] = channel['thumbnail'].replace("~", "/") 
hostName = urllib.parse.urlparse(channel['thumbnail']).netloc - channel['thumbnail'] = channel['thumbnail'].replace("https://{}".format(hostName), "")+"?host="+hostName - return render_template('ytsearch.html', form=form, btform=button_form, results=results, restricted=config['restrictPublicUsage'], config=config, npage=next_page, ppage=prev_page) + channel['thumbnail'] = channel['thumbnail'].replace("https://{}".format(hostName), + "") + "?host=" + hostName + return render_template('ytsearch.html', form=form, btform=button_form, results=results, + restricted=config['restrictPublicUsage'], config=config, npage=next_page, + ppage=prev_page) else: return render_template('ytsearch.html', form=form, results=False) + @app.route('/ytfollow/', methods=['POST']) @login_required def ytfollow(channelId): @@ -320,6 +347,7 @@ def ytfollow(channelId): r = followYoutubeChannel(channelId) return redirect(request.referrer) + def followYoutubeChannel(channelId): try: channelData = YoutubeSearch.channelInfo(channelId, False) @@ -353,6 +381,7 @@ def ytunfollow(channelId): unfollowYoutubeChannel(channelId) return redirect(request.referrer) + def unfollowYoutubeChannel(channelId): try: channel = youtubeFollow.query.filter_by(channelId=channelId).first() @@ -367,6 +396,7 @@ def unfollowYoutubeChannel(channelId): except: flash("There was an error unfollowing the user. Try again.") + @app.route('/channel/', methods=['GET']) @app.route('/user/', methods=['GET']) @app.route('/c/', methods=['GET']) @@ -382,92 +412,70 @@ def channel(id): for video in channelData[1]: if config['nginxVideoStream']: hostName = urllib.parse.urlparse(video['videoThumb']).netloc - video['videoThumb'] = video['videoThumb'].replace("https://{}".format(hostName), "").replace("hqdefault", "mqdefault")+"&host="+hostName + video['videoThumb'] = video['videoThumb'].replace("https://{}".format(hostName), "").replace("hqdefault", + "mqdefault") + "&host=" + hostName else: video['videoThumb'] = video['videoThumb'].replace('/', '~') if config['nginxVideoStream']: hostName = urllib.parse.urlparse(channelData[0]['avatar']).netloc - channelData[0]['avatar'] = channelData[0]['avatar'].replace("https://{}".format(hostName), "")+"?host="+hostName + channelData[0]['avatar'] = channelData[0]['avatar'].replace("https://{}".format(hostName), + "") + "?host=" + hostName else: channelData[0]['avatar'] = channelData[0]['avatar'].replace('/', '~') - return render_template('channel.html', form=form, btform=button_form, channel=channelData[0], videos=channelData[1], restricted=config['restrictPublicUsage'], config=config) + return render_template('channel.html', form=form, btform=button_form, channel=channelData[0], videos=channelData[1], + restricted=config['restrictPublicUsage'], config=config) + def get_best_urls(urls): '''Gets URLS in youtube format (format_id, url, height) and returns best ones for yotter''' best_formats = ["22", "18", "34", "35", "36", "37", "38", "43", "44", "45", "46"] - best_urls=[] - for url in urls: - for f in best_formats: - if url['format_id'] == f: - best_urls.append(url) - return best_urls - -def get_live_urls(urls): - '''Gets URLS in youtube format (format_id, url, height) and returns best ones for yotter''' - best_formats = ["91", "92", "93", "94", "95", "96"] - best_urls=[] + best_urls = [] for url in urls: for f in best_formats: if url['format_id'] == f: best_urls.append(url) return best_urls + +def get_live_urls(urls): + """Gets URLS in youtube format (format_id, url, height) and returns best ones for yotter""" + best_formats = 
["91", "92", "93", "94", "95", "96"] + best_urls = [] + for url in urls: + for f in best_formats: + if url['format_id'] == f: + best_urls.append(url) + return best_urls + + @app.route('/watch', methods=['GET']) @login_required def watch(): id = request.args.get('v', None) - info = ytvids.get_video_info(id) + info = ytwatch.extract_info(id, False, playlist_id=None, index=None) # Use nginx - try: - for url in info['video']['urls']: - hostName = urllib.parse.urlparse(url['url']).netloc - url['url'] = url['url'].replace("https://{}".format(hostName), "")+"&host="+hostName - except: - hostName = "#" - url = "#" + best_formats = ["22", "18", "34", "35", "36", "37", "38", "43", "44", "45", "46"] + for v_format in info['formats']: + hostName = urllib.parse.urlparse(v_format['url']).netloc + v_format['url'] = v_format['url'].replace("https://{}".format(hostName), "") + "&host=" + hostName + if v_format['audio_bitrate'] is not None and v_format['vcodec'] is not None: + v_format['video_valid'] = True + elif v_format['audio_bitrate'] is not None and v_format['vcodec'] is None: + v_format['audio_valid'] = True - try: - audioHostName = urllib.parse.urlparse(info['video']['audio']).netloc - audioUrl = info['video']['audio'].replace("https://{}".format(audioHostName), "")+"&host="+audioHostName - except: - audioUrl = False - - if info['video']['isUpcoming']: - vid_urls=[] - elif info['video']['isLive']: - vid_urls = get_live_urls(info['video']['urls']) - else: - vid_urls = get_best_urls(info['video']['urls']) + info['description'] = Markup(bleach.linkify(info['description'].replace("\n", "
"))) # Get comments - try: - comments = ytcomments.video_comments(id) - if comments: - comments.sort(key=lambda x: x['likes'], reverse=True) - else: - comments = False - except: - comments = False + videocomments = comments.video_comments(id, sort=0, offset=0, lc='', secret_key='') + videocomments = utils.post_process_comments_info(videocomments) + + if videocomments is not None: + videocomments.sort(key=lambda x: x['likes'], reverse=True) + + info['rating'] = str((info['like_count']/(info['like_count']+info['dislike_count']))*100)[0:4] + return render_template("video.html", info=info, title='{}'.format(info['title']), config=config, videocomments=videocomments) - video={ - 'title':info['video']['title'], - 'description':Markup(markupString(info['video']['description'])), - 'viewCount':info['video']['views'], - 'author':info['video']['author'], - 'authorUrl':"/channel/{}".format(info['owner']['id']), - 'channelId': info['owner']['id'], - 'id':id, - 'averageRating': str((float(info['video']['rating'])/5)*100), - 'videoHostName': hostName, - 'isLive': info['video']['isLive'], - 'isUpcoming': info['video']['isUpcoming'], - 'thumbnail': info['video']['thumbnail'], - 'nginxAudioUrl': audioUrl, - 'premieres': info['video']['premieres'], - 'comments': comments - } - return render_template("video.html", video=video, title='{}'.format(video['title']), config=config, urls=vid_urls) def markupString(string): string = string.replace("\n\n", "

").replace("\n", "
") @@ -477,35 +485,39 @@ def markupString(string): string = string.replace("https://twitter.com/", "/u/") return Markup(string) + ## PROXY videos through Yotter server to the client. @app.route('/stream/', methods=['GET', 'POST']) @login_required def stream(url): - #This function proxies the video stream from GoogleVideo to the client. + # This function proxies the video stream from GoogleVideo to the client. url = url.replace('YotterSlash', '/') headers = Headers() - if(url): + if (url): s = requests.Session() s.verify = True - req = s.get(url, stream = True) + req = s.get(url, stream=True) headers.add('Range', request.headers['Range']) - headers.add('Accept-Ranges','bytes') - headers.add('Content-Length', str(int(req.headers['Content-Length'])+1)) - response = Response(req.iter_content(chunk_size=10*1024), mimetype=req.headers['Content-Type'], content_type=req.headers['Content-Type'], direct_passthrough=True, headers=headers) - #enable browser file caching with etags - response.cache_control.public = True + headers.add('Accept-Ranges', 'bytes') + headers.add('Content-Length', str(int(req.headers['Content-Length']) + 1)) + response = Response(req.iter_content(chunk_size=10 * 1024), mimetype=req.headers['Content-Type'], + content_type=req.headers['Content-Type'], direct_passthrough=True, headers=headers) + # enable browser file caching with etags + response.cache_control.public = True response.cache_control.max_age = int(60000) return response else: flash("Something went wrong loading the video... Try again.") return redirect(url_for('youtube')) + def download_file(streamable): with streamable as stream: stream.raise_for_status() for chunk in stream.iter_content(chunk_size=8192): yield chunk + ######################### #### General Logic ###### ######################### @@ -529,18 +541,21 @@ def login(): return redirect(next_page) return render_template('login.html', title='Sign In', form=form, config=config) -#Proxy images through server + +# Proxy images through server @app.route('/img/', methods=['GET', 'POST']) @login_required def img(url): pic = requests.get(url.replace("~", "/")) - return Response(pic,mimetype="image/png") + return Response(pic, mimetype="image/png") + @app.route('/logout') def logout(): logout_user() return redirect(url_for('index')) + @app.route('/settings') @login_required @cache.cached(timeout=50, key_prefix='settings') @@ -550,20 +565,21 @@ def settings(): for u in users: if u.last_seen == None: u.set_last_seen() - db.session.commit() + db.session.commit() else: t = datetime.datetime.utcnow() - u.last_seen s = t.total_seconds() - m = s/60 + m = s / 60 if m < 25: - active = active+1 + active = active + 1 instanceInfo = { - "totalUsers":db.session.query(User).count(), - "active":active, + "totalUsers": db.session.query(User).count(), + "active": active, } return render_template('settings.html', info=instanceInfo, config=config, admin=current_user.is_admin) + '''@app.route('/clear_inactive_users/') @login_required def clear_inactive_users(phash): @@ -585,9 +601,10 @@ def clear_inactive_users(phash): flash("You must be admin for this action") return redirect(request.referrer)''' + @app.route('/export') @login_required -#Export data into a JSON file. Later you can import the data. +# Export data into a JSON file. Later you can import the data. 
def export(): a = exportData() if a: @@ -595,6 +612,7 @@ def export(): else: return redirect(url_for('error/405')) + def exportData(): twitterFollowing = current_user.twitter_following_list() youtubeFollowing = current_user.youtube_following_list() @@ -606,7 +624,7 @@ def exportData(): data['twitter'].append({ 'username': f.username }) - + for f in youtubeFollowing: data['youtube'].append({ 'channelId': f.channelId @@ -617,7 +635,8 @@ def exportData(): json.dump(data, outfile) return True except: - return False + return False + @app.route('/importdata', methods=['GET', 'POST']) @login_required @@ -643,6 +662,7 @@ def importdata(): return redirect(request.referrer) + @app.route('/deleteme', methods=['GET', 'POST']) @login_required def deleteme(): @@ -652,6 +672,7 @@ def deleteme(): logout_user() return redirect(url_for('index')) + def importYoutubeSubscriptions(file): filename = secure_filename(file.filename) try: @@ -662,16 +683,17 @@ def importYoutubeSubscriptions(file): print(e) flash("File is not valid.") + def importYotterSubscriptions(file): filename = secure_filename(file.filename) data = json.load(file) - print(data) for acc in data['twitter']: r = followTwitterAccount(acc['username']) for acc in data['youtube']: r = followYoutubeChannel(acc['channelId']) + @app.route('/register', methods=['GET', 'POST']) def register(): form = RegistrationForm() @@ -697,59 +719,67 @@ def register(): db.session.commit() flash('Congratulations, you are now a registered user!') return redirect(url_for('login')) - + return render_template('register.html', title='Register', registrations=REGISTRATIONS, form=form, config=config) + @app.route('/registrations_status/icon') def registrations_status_icon(): count = db.session.query(User).count() if count >= config['maxInstanceUsers'] or config['maxInstanceUsers'] == 0: - return redirect(url_for('static',filename='img/close.png')) + return redirect(url_for('static', filename='img/close.png')) else: - return redirect(url_for('static',filename='img/open.png')) + return redirect(url_for('static', filename='img/open.png')) + @app.route('/registrations_status/text') def registrations_status_text(): count = db.session.query(User).count() return "{c}/{t}".format(c=count, t=config['maxInstanceUsers']) + @app.route('/error/') def error(errno): return render_template('{}.html'.format(str(errno)), config=config) + def getTimeDiff(t): diff = datetime.datetime.now() - datetime.datetime(*t[:6]) if diff.days == 0: if diff.seconds > 3599: - timeString = "{}h".format(int((diff.seconds/60)/60)) + timeString = "{}h".format(int((diff.seconds / 60) / 60)) else: - timeString = "{}m".format(int(diff.seconds/60)) + timeString = "{}m".format(int(diff.seconds / 60)) else: timeString = "{}d".format(diff.days) return timeString + def isTwitterUser(username): response = requests.get('{instance}{user}/rss'.format(instance=NITTERINSTANCE, user=username)) if response.status_code == 404: return False return True + def twitterUserSearch(terms): - - response = urllib.request.urlopen('{instance}search?f=users&q={user}'.format(instance=NITTERINSTANCE, user=urllib.parse.quote(terms))).read() + response = urllib.request.urlopen( + '{instance}search?f=users&q={user}'.format(instance=NITTERINSTANCE, user=urllib.parse.quote(terms))).read() html = BeautifulSoup(str(response), "lxml") results = [] - if html.body.find('h2', attrs={'class':'timeline-none'}): + if html.body.find('h2', attrs={'class': 'timeline-none'}): return False else: - html = html.body.find_all('div', attrs={'class':'timeline-item'}) 
+ html = html.body.find_all('div', attrs={'class': 'timeline-item'}) for item in html: user = { - "fullName": item.find('a', attrs={'class':'fullname'}).getText().encode('latin_1').decode('unicode_escape').encode('latin_1').decode('utf8'), - "username": item.find('a', attrs={'class':'username'}).getText().encode('latin_1').decode('unicode_escape').encode('latin_1').decode('utf8'), - 'avatar': "{i}{s}".format(i=NITTERINSTANCE, s=item.find('img', attrs={'class':'avatar'})['src'][1:]) + "fullName": item.find('a', attrs={'class': 'fullname'}).getText().encode('latin_1').decode( + 'unicode_escape').encode('latin_1').decode('utf8'), + "username": item.find('a', attrs={'class': 'username'}).getText().encode('latin_1').decode( + 'unicode_escape').encode('latin_1').decode('utf8'), + 'avatar': "{i}{s}".format(i=NITTERINSTANCE, s=item.find('img', attrs={'class': 'avatar'})['src'][1:]) } results.append(user) return results @@ -757,36 +787,43 @@ def twitterUserSearch(terms): def getTwitterUserInfo(username): response = urllib.request.urlopen('{instance}{user}'.format(instance=NITTERINSTANCE, user=username)).read() - #rssFeed = feedparser.parse(response.content) + # rssFeed = feedparser.parse(response.content) html = BeautifulSoup(str(response), "lxml") - if html.body.find('div', attrs={'class':'error-panel'}): + if html.body.find('div', attrs={'class': 'error-panel'}): return False else: - html = html.body.find('div', attrs={'class':'profile-card'}) + html = html.body.find('div', attrs={'class': 'profile-card'}) - if html.find('a', attrs={'class':'profile-card-fullname'}): - fullName = html.find('a', attrs={'class':'profile-card-fullname'}).getText().encode('latin1').decode('unicode_escape').encode('latin1').decode('utf8') + if html.find('a', attrs={'class': 'profile-card-fullname'}): + fullName = html.find('a', attrs={'class': 'profile-card-fullname'}).getText().encode('latin1').decode( + 'unicode_escape').encode('latin1').decode('utf8') else: fullName = None - - if html.find('div', attrs={'class':'profile-bio'}): - profileBio = html.find('div', attrs={'class':'profile-bio'}).getText().encode('latin1').decode('unicode_escape').encode('latin1').decode('utf8') + + if html.find('div', attrs={'class': 'profile-bio'}): + profileBio = html.find('div', attrs={'class': 'profile-bio'}).getText().encode('latin1').decode( + 'unicode_escape').encode('latin1').decode('utf8') else: profileBio = None user = { - "profileFullName":fullName, - "profileUsername":html.find('a', attrs={'class':'profile-card-username'}).string.encode('latin_1').decode('unicode_escape').encode('latin_1').decode('utf8'), - "profileBio":profileBio, - "tweets":html.find_all('span', attrs={'class':'profile-stat-num'})[0].string, - "following":html.find_all('span', attrs={'class':'profile-stat-num'})[1].string, - "followers":numerize.numerize(int(html.find_all('span', attrs={'class':'profile-stat-num'})[2].string.replace(",",""))), - "likes":html.find_all('span', attrs={'class':'profile-stat-num'})[3].string, - "profilePic":"{instance}{pic}".format(instance=NITTERINSTANCE, pic=html.find('a', attrs={'class':'profile-card-avatar'})['href'][1:]) + "profileFullName": fullName, + "profileUsername": html.find('a', attrs={'class': 'profile-card-username'}).string.encode('latin_1').decode( + 'unicode_escape').encode('latin_1').decode('utf8'), + "profileBio": profileBio, + "tweets": html.find_all('span', attrs={'class': 'profile-stat-num'})[0].string, + "following": html.find_all('span', attrs={'class': 'profile-stat-num'})[1].string, + "followers": 
numerize.numerize( + int(html.find_all('span', attrs={'class': 'profile-stat-num'})[2].string.replace(",", ""))), + "likes": html.find_all('span', attrs={'class': 'profile-stat-num'})[3].string, + "profilePic": "{instance}{pic}".format(instance=NITTERINSTANCE, + pic=html.find('a', attrs={'class': 'profile-card-avatar'})['href'][ + 1:]) } return user + def getFeed(urls): feedPosts = [] with FuturesSession() as session: @@ -794,121 +831,133 @@ def getFeed(urls): for future in as_completed(futures): res = future.result().content.decode('utf-8') html = BeautifulSoup(res, "html.parser") - userFeed = html.find_all('div', attrs={'class':'timeline-item'}) + userFeed = html.find_all('div', attrs={'class': 'timeline-item'}) if userFeed != []: - for post in userFeed[:-1]: - date_time_str = post.find('span', attrs={'class':'tweet-date'}).find('a')['title'].replace(",","") - time = datetime.datetime.now() - datetime.datetime.strptime(date_time_str, '%d/%m/%Y %H:%M:%S') - if time.days >=7: + for post in userFeed[:-1]: + date_time_str = post.find('span', attrs={'class': 'tweet-date'}).find('a')['title'].replace(",", "") + time = datetime.datetime.now() - datetime.datetime.strptime(date_time_str, '%d/%m/%Y %H:%M:%S') + if time.days >= 7: + continue + + if post.find('div', attrs={'class': 'pinned'}): + if post.find('div', attrs={'class': 'pinned'}).find('span', attrs={'icon-pin'}): continue - if post.find('div', attrs={'class':'pinned'}): - if post.find('div', attrs={'class':'pinned'}).find('span', attrs={'icon-pin'}): - continue + newPost = {} + newPost["op"] = post.find('a', attrs={'class': 'username'}).text + newPost["twitterName"] = post.find('a', attrs={'class': 'fullname'}).text + newPost["timeStamp"] = date_time_str + newPost["date"] = post.find('span', attrs={'class': 'tweet-date'}).find('a').text + newPost["content"] = Markup(post.find('div', attrs={'class': 'tweet-content'})) - newPost = {} - newPost["op"] = post.find('a', attrs={'class':'username'}).text - newPost["twitterName"] = post.find('a', attrs={'class':'fullname'}).text - newPost["timeStamp"] = date_time_str - newPost["date"] = post.find('span', attrs={'class':'tweet-date'}).find('a').text - newPost["content"] = Markup(post.find('div', attrs={'class':'tweet-content'})) - - if post.find('div', attrs={'class':'retweet-header'}): - newPost["username"] = post.find('div', attrs={'class':'retweet-header'}).find('div', attrs={'class':'icon-container'}).text - newPost["isRT"] = True + if post.find('div', attrs={'class': 'retweet-header'}): + newPost["username"] = post.find('div', attrs={'class': 'retweet-header'}).find('div', attrs={ + 'class': 'icon-container'}).text + newPost["isRT"] = True + else: + newPost["username"] = newPost["op"] + newPost["isRT"] = False + + newPost["profilePic"] = NITTERINSTANCE + \ + post.find('a', attrs={'class': 'tweet-avatar'}).find('img')['src'][1:] + newPost["url"] = NITTERINSTANCE + post.find('a', attrs={'class': 'tweet-link'})['href'][1:] + if post.find('div', attrs={'class': 'quote'}): + newPost["isReply"] = True + quote = post.find('div', attrs={'class': 'quote'}) + if quote.find('div', attrs={'class': 'quote-text'}): + newPost["replyingTweetContent"] = Markup(quote.find('div', attrs={'class': 'quote-text'})) + + if quote.find('a', attrs={'class': 'still-image'}): + newPost["replyAttachedImg"] = NITTERINSTANCE + \ + quote.find('a', attrs={'class': 'still-image'})['href'][1:] + + if quote.find('div', attrs={'class': 'unavailable-quote'}): + newPost["replyingUser"] = "Unavailable" else: - newPost["username"] = 
newPost["op"] - newPost["isRT"] = False - - newPost["profilePic"] = NITTERINSTANCE+post.find('a', attrs={'class':'tweet-avatar'}).find('img')['src'][1:] - newPost["url"] = NITTERINSTANCE + post.find('a', attrs={'class':'tweet-link'})['href'][1:] - if post.find('div', attrs={'class':'quote'}): - newPost["isReply"] = True - quote = post.find('div', attrs={'class':'quote'}) - if quote.find('div', attrs={'class':'quote-text'}): - newPost["replyingTweetContent"] = Markup(quote.find('div', attrs={'class':'quote-text'})) - - if quote.find('a', attrs={'class':'still-image'}): - newPost["replyAttachedImg"] = NITTERINSTANCE+quote.find('a', attrs={'class':'still-image'})['href'][1:] - - if quote.find('div', attrs={'class':'unavailable-quote'}): - newPost["replyingUser"]="Unavailable" - else: - try: - newPost["replyingUser"]=quote.find('a', attrs={'class':'username'}).text - except: - newPost["replyingUser"]="Unavailable" - post.find('div', attrs={'class':'quote'}).decompose() + try: + newPost["replyingUser"] = quote.find('a', attrs={'class': 'username'}).text + except: + newPost["replyingUser"] = "Unavailable" + post.find('div', attrs={'class': 'quote'}).decompose() - if post.find('div', attrs={'class':'attachments'}): - if not post.find(class_='quote'): - if post.find('div', attrs={'class':'attachments'}).find('a', attrs={'class':'still-image'}): - newPost["attachedImg"] = NITTERINSTANCE + post.find('div', attrs={'class':'attachments'}).find('a')['href'][1:] - feedPosts.append(newPost) + if post.find('div', attrs={'class': 'attachments'}): + if not post.find(class_='quote'): + if post.find('div', attrs={'class': 'attachments'}).find('a', + attrs={'class': 'still-image'}): + newPost["attachedImg"] = NITTERINSTANCE + \ + post.find('div', attrs={'class': 'attachments'}).find('a')[ + 'href'][1:] + feedPosts.append(newPost) return feedPosts + def getPosts(account): feedPosts = [] - - #Gather profile info. + + # Gather profile info. 
rssFeed = urllib.request.urlopen('{instance}{user}'.format(instance=NITTERINSTANCE, user=account)).read() - #Gather feedPosts + # Gather feedPosts res = rssFeed.decode('utf-8') html = BeautifulSoup(res, "html.parser") - userFeed = html.find_all('div', attrs={'class':'timeline-item'}) + userFeed = html.find_all('div', attrs={'class': 'timeline-item'}) if userFeed != []: - for post in userFeed[:-1]: - date_time_str = post.find('span', attrs={'class':'tweet-date'}).find('a')['title'].replace(",","") + for post in userFeed[:-1]: + date_time_str = post.find('span', attrs={'class': 'tweet-date'}).find('a')['title'].replace(",", "") - if post.find('div', attrs={'class':'pinned'}): - if post.find('div', attrs={'class':'pinned'}).find('span', attrs={'icon-pin'}): - continue + if post.find('div', attrs={'class': 'pinned'}): + if post.find('div', attrs={'class': 'pinned'}).find('span', attrs={'icon-pin'}): + continue - newPost = twitterPost() - newPost.op = post.find('a', attrs={'class':'username'}).text - newPost.twitterName = post.find('a', attrs={'class':'fullname'}).text - newPost.timeStamp = datetime.datetime.strptime(date_time_str, '%d/%m/%Y %H:%M:%S') - newPost.date = post.find('span', attrs={'class':'tweet-date'}).find('a').text - newPost.content = Markup(post.find('div', attrs={'class':'tweet-content'})) - - if post.find('div', attrs={'class':'retweet-header'}): - newPost.username = post.find('div', attrs={'class':'retweet-header'}).find('div', attrs={'class':'icon-container'}).text - newPost.isRT = True - else: - newPost.username = newPost.op - newPost.isRT = False - - newPost.profilePic = NITTERINSTANCE+post.find('a', attrs={'class':'tweet-avatar'}).find('img')['src'][1:] - newPost.url = NITTERINSTANCE + post.find('a', attrs={'class':'tweet-link'})['href'][1:] - if post.find('div', attrs={'class':'quote'}): - newPost.isReply = True - quote = post.find('div', attrs={'class':'quote'}) - if quote.find('div', attrs={'class':'quote-text'}): - newPost.replyingTweetContent = Markup(quote.find('div', attrs={'class':'quote-text'})) - - if quote.find('a', attrs={'class':'still-image'}): - newPost.replyAttachedImg = NITTERINSTANCE+quote.find('a', attrs={'class':'still-image'})['href'][1:] - - try: - newPost.replyingUser=quote.find('a', attrs={'class':'username'}).text - except: - newPost.replyingUser="Unavailable" - post.find('div', attrs={'class':'quote'}).decompose() + newPost = twitterPost() + newPost.op = post.find('a', attrs={'class': 'username'}).text + newPost.twitterName = post.find('a', attrs={'class': 'fullname'}).text + newPost.timeStamp = datetime.datetime.strptime(date_time_str, '%d/%m/%Y %H:%M:%S') + newPost.date = post.find('span', attrs={'class': 'tweet-date'}).find('a').text + newPost.content = Markup(post.find('div', attrs={'class': 'tweet-content'})) - if post.find('div', attrs={'class':'attachments'}): - if not post.find(class_='quote'): - if post.find('div', attrs={'class':'attachments'}).find('a', attrs={'class':'still-image'}): - newPost.attachedImg = NITTERINSTANCE + post.find('div', attrs={'class':'attachments'}).find('a')['href'][1:] - feedPosts.append(newPost) + if post.find('div', attrs={'class': 'retweet-header'}): + newPost.username = post.find('div', attrs={'class': 'retweet-header'}).find('div', attrs={ + 'class': 'icon-container'}).text + newPost.isRT = True + else: + newPost.username = newPost.op + newPost.isRT = False + + newPost.profilePic = NITTERINSTANCE + post.find('a', attrs={'class': 'tweet-avatar'}).find('img')['src'][1:] + newPost.url = NITTERINSTANCE + 
post.find('a', attrs={'class': 'tweet-link'})['href'][1:] + if post.find('div', attrs={'class': 'quote'}): + newPost.isReply = True + quote = post.find('div', attrs={'class': 'quote'}) + if quote.find('div', attrs={'class': 'quote-text'}): + newPost.replyingTweetContent = Markup(quote.find('div', attrs={'class': 'quote-text'})) + + if quote.find('a', attrs={'class': 'still-image'}): + newPost.replyAttachedImg = NITTERINSTANCE + quote.find('a', attrs={'class': 'still-image'})['href'][ + 1:] + + try: + newPost.replyingUser = quote.find('a', attrs={'class': 'username'}).text + except: + newPost.replyingUser = "Unavailable" + post.find('div', attrs={'class': 'quote'}).decompose() + + if post.find('div', attrs={'class': 'attachments'}): + if not post.find(class_='quote'): + if post.find('div', attrs={'class': 'attachments'}).find('a', attrs={'class': 'still-image'}): + newPost.attachedImg = NITTERINSTANCE + \ + post.find('div', attrs={'class': 'attachments'}).find('a')['href'][1:] + feedPosts.append(newPost) return feedPosts + def getYoutubePosts(ids): videos = [] with FuturesSession() as session: - futures = [session.get('https://www.youtube.com/feeds/videos.xml?channel_id={id}'.format(id=id.channelId)) for id in ids] + futures = [session.get('https://www.youtube.com/feeds/videos.xml?channel_id={id}'.format(id=id.channelId)) for + id in ids] for future in as_completed(futures): resp = future.result() - rssFeed=feedparser.parse(resp.content) + rssFeed = feedparser.parse(resp.content) for vid in rssFeed.entries: try: # Try to get time diff @@ -916,13 +965,14 @@ def getYoutubePosts(ids): except: # If youtube rss does not have parsed time, generate it. Else set time to 0. try: - time = datetime.datetime.now() - datetime.datetime(datetime.datetime.strptime(vid.published, '%y-%m-%dT%H:%M:%S+00:00')) + time = datetime.datetime.now() - datetime.datetime( + datetime.datetime.strptime(vid.published, '%y-%m-%dT%H:%M:%S+00:00')) except: time = datetime.datetime.now() - datetime.datetime.now() - if time.days >=6: + if time.days >= 6: continue - + video = ytPost() try: video.date = vid.published_parsed @@ -946,12 +996,13 @@ def getYoutubePosts(ids): video.videoTitle = vid.title if config['nginxVideoStream']: hostName = urllib.parse.urlparse(vid.media_thumbnail[0]['url']).netloc - video.videoThumb = vid.media_thumbnail[0]['url'].replace("https://{}".format(hostName), "").replace("hqdefault", "mqdefault")+"?host="+hostName + video.videoThumb = vid.media_thumbnail[0]['url'].replace("https://{}".format(hostName), "").replace( + "hqdefault", "mqdefault") + "?host=" + hostName else: video.videoThumb = vid.media_thumbnail[0]['url'].replace('/', '~') video.views = vid.media_statistics['views'] video.description = vid.summary_detail.value - video.description = re.sub(r'^https?:\/\/.*[\r\n]*', '', video.description[0:120]+"...", flags=re.MULTILINE) + video.description = re.sub(r'^https?:\/\/.*[\r\n]*', '', video.description[0:120] + "...", + flags=re.MULTILINE) videos.append(video) return videos - diff --git a/app/templates/_video_comment.html b/app/templates/_video_comment.html index 6cc1478..db56e64 100644 --- a/app/templates/_video_comment.html +++ b/app/templates/_video_comment.html @@ -1,7 +1,7 @@
- {% if comment.authorIsChannelOwner %} + {% if comment.author == info.author %} {{comment.author}} {% else %} @@ -22,9 +22,6 @@ {{comment.likes}}
- {%if comment.creatorHeart != false%} - - {% endif %}
\ No newline at end of file diff --git a/app/templates/video.html b/app/templates/video.html index fd46545..55b4c9c 100644 --- a/app/templates/video.html +++ b/app/templates/video.html @@ -4,30 +4,30 @@ {% extends "base.html" %} {% block content %}
- {% if video.nginxUrl == "#" %} + {% if info.error != None or info.playability_error != None %}

ERROR WITH VIDEO

- {% elif video.isUpcoming %} + {% elif info.playability_status != None %}

SCHEDULED VIDEO

{{video.premieres}}
- {% elif video.isLive %} + {% elif info.live %}
- +
-

LIVESTREAM VIDEO

+

LIVESTREAM VIDEO

FEATURE AVAILABLE SOON

Livestreams are under development and not yet supported on Yotter.
@@ -41,11 +41,11 @@ buffered preload="none"> {% if config.nginxVideoStream %} - {% for url in urls %} - + {% for format in info.formats %} + {% if format.video_valid %} + + {% endif %} {% endfor %} - {% else %} - {% endif %}
@@ -53,55 +53,54 @@
-

{{video.title}}

+

{{info.title}}

-

{{video.viewCount}}

+

{{info.view_count}}

- {% if video.averageRating | int > 49 %} -

{{video.averageRating[0:4]}}%

+ {% if info.rating | int > 49 %} +

{{info.rating}}%

{% else %} -

{{video.averageRating[0:4]}}%

+

{{info.rating}}%

{% endif %}
-

Play Only Audio

+

Audio Only

-

{{video.description}}

+

{{info.description}}

- {% if comments != False %}

Comments

- {% for comment in video.comments %} + {% for comment in videocomments %} {% include '_video_comment.html' %} {% endfor %}
- {%endif%} - -{% if video.isLive %} +{% if info.live %} +

Active

+
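
Note for reviewers (not part of the patch): a minimal, hypothetical sketch of the format handling the reworked /watch route above introduces. Each format dict returned by ytwatch.extract_info() has its googlevideo host moved into a host= query parameter so nginx can proxy the stream, and is flagged as playable video or audio-only. The dict keys ('url', 'audio_bitrate', 'vcodec') are taken from the route itself; the helper name and sample data below are illustrative only.

import urllib.parse

def classify_and_proxy(formats):
    # Illustrative sketch of the loop in watch(); assumes youtube-local style
    # format dicts with 'url', 'audio_bitrate' and 'vcodec' keys.
    for v_format in formats:
        host = urllib.parse.urlparse(v_format['url']).netloc
        # Strip the googlevideo host and append it as &host=..., so the
        # relative /videoplayback URL can be proxied by nginx.
        v_format['url'] = v_format['url'].replace("https://{}".format(host), "") + "&host=" + host
        # A format with both an audio bitrate and a video codec is playable
        # directly; audio-only formats back the "Audio Only" option.
        v_format['video_valid'] = v_format['audio_bitrate'] is not None and v_format['vcodec'] is not None
        v_format['audio_valid'] = v_format['audio_bitrate'] is not None and v_format['vcodec'] is None
    return formats

# Example with fabricated data:
# fmts = classify_and_proxy([{'url': 'https://r4---example.googlevideo.com/videoplayback?expire=1',
#                             'audio_bitrate': 128, 'vcodec': 'avc1'}])
# fmts[0]['url']  ->  '/videoplayback?expire=1&host=r4---example.googlevideo.com'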