Merge pull request #179 from toh995/add-f-strings

Refactor to use f-strings

commit d3d67858a6
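Every hunk below follows the same mechanical pattern: a str.format() call is rewritten as an f-string literal with the expression embedded directly in the string. A minimal sketch of that pattern for reference; the sample values (username, page, the config dict) are purely illustrative and not taken from the codebase:

# Illustrative values only; none of these come from the Yotter codebase.
username = "example_user"
page = 1

# Old style: placeholders filled in by str.format().
old_msg = "{} followed!".format(username)
old_url = "/ytsearch?q={q}&p={p}".format(q=username, p=int(page) + 1)

# New style: the same expressions embedded directly in the literal.
new_msg = f"{username} followed!"
new_url = f"/ytsearch?q={username}&p={int(page) + 1}"

assert old_msg == new_msg
assert old_url == new_url

# One caveat visible in the diff: before Python 3.12 an f-string cannot reuse
# the quote character of the enclosing literal, so dict lookups inside the
# braces switch to the other quote type.
config = {"nitterInstance": "https://nitter.example/"}  # illustrative value
feed_url = f'{config["nitterInstance"]}{username}'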
@@ -27,7 +27,7 @@ class User(UserMixin, db.Model):
     posts = db.relationship('Post', backref='author', lazy='dynamic')

     def __repr__(self):
-        return '<User {}>'.format(self.username)
+        return f'<User {self.username}>'

     def set_last_seen(self):
         self.last_seen = datetime.utcnow()
@@ -153,7 +153,7 @@ class youtubeFollow(db.Model):
                             back_populates="youtubeFollowed")

     def __repr__(self):
-        return '<youtubeFollow {}>'.format(self.channelName)
+        return f'<youtubeFollow {self.channelName}>'

 class twitterFollow(db.Model):
     __tablename__ = 'twitterAccount'
@@ -164,7 +164,7 @@ class twitterFollow(db.Model):
                             back_populates="twitterFollowed")

     def __repr__(self):
-        return '<twitterFollow {}>'.format(self.username)
+        return f'<twitterFollow {self.username}>'

 class Post(db.Model):
     id = db.Column(db.Integer, primary_key=True)
@@ -175,5 +175,4 @@ class Post(db.Model):
     user_id = db.Column(db.Integer, db.ForeignKey('user.id'))

     def __repr__(self):
-        return '<Post {}>'.format(self.body)
-
+        return f'<Post {self.body}>'
@@ -91,7 +91,7 @@ def twitter(page=0):
         followList.append(f.username)
     posts = []

-    cache_file = glob.glob("app/cache/{}_*".format(current_user.username))
+    cache_file = glob.glob(f"app/cache/{current_user.username}_*")
     if (len(cache_file) > 0):
         time_diff = round(time.time() - os.path.getmtime(cache_file[0]))
     else:
@@ -103,20 +103,20 @@ def twitter(page=0):
         for f in cache_file:
             os.remove(f)
         feed = nitterfeed.get_feed(followList)
-        cache_file = "{u}_{d}.json".format(u=current_user.username, d=time.strftime("%Y%m%d-%H%M%S"))
-        with open("app/cache/{}".format(cache_file), 'w') as fp:
+        cache_file = f"{current_user.username}_{time.strftime('%Y%m%d-%H%M%S')}.json"
+        with open(f"app/cache/{cache_file}", 'w') as fp:
             json.dump(feed, fp)

     # Else, refresh feed
     else:
         try:
-            cache_file = glob.glob("app/cache/{}*".format(current_user.username))[0]
+            cache_file = glob.glob(f"app/cache/{current_user.username}*")[0]
             with open(cache_file, 'r') as fp:
                 feed = json.load(fp)
         except:
             feed = nitterfeed.get_feed(followList)
-            cache_file = "{u}_{d}.json".format(u=current_user.username, d=time.strftime("%Y%m%d-%H%M%S"))
-            with open("app/cache/{}".format(cache_file), 'w') as fp:
+            cache_file = f"{current_user.username}_{time.strftime('%Y%m%d-%H%M%S')}.json"
+            with open(f"app/cache/{cache_file}", 'w') as fp:
                 json.dump(feed, fp)

     posts.extend(feed)
@@ -187,7 +187,7 @@ def follow(username):
     form = EmptyForm()
     if form.validate_on_submit():
         if followTwitterAccount(username):
-            flash("{} followed!".format(username))
+            flash(f"{username} followed!")
     return redirect(request.referrer)


@@ -202,7 +202,7 @@ def followTwitterAccount(username):
                 db.session.commit()
                 return True
             except:
-                flash("Twitter: Couldn't follow {}. Already followed?".format(username))
+                flash(f"Twitter: Couldn't follow {username}. Already followed?")
                 return False
     else:
         flash("Something went wrong... try again")
@@ -215,7 +215,7 @@ def unfollow(username):
     form = EmptyForm()
     if form.validate_on_submit():
         if twUnfollow(username):
-            flash("{} unfollowed!".format(username))
+            flash(f"{username} unfollowed!")
     return redirect(request.referrer)


@@ -248,7 +248,7 @@ def search():
         if results:
             return render_template('search.html', form=form, results=results, config=config)
         else:
-            flash("User {} not found...".format(user))
+            flash(f"User {user} not found...")
             return redirect(request.referrer)
     else:
         return render_template('search.html', form=form, config=config)
@@ -262,7 +262,7 @@ def u(username, page=1):
     if username == "favicon.ico":
         return redirect(url_for('static', filename='favicons/favicon.ico'))
     form = EmptyForm()
-    avatarPath = "img/avatars/{}.png".format(str(random.randint(1, 12)))
+    avatarPath = f"img/avatars/{str(random.randint(1, 12))}.png"
     user = nitteruser.get_user_info(username)
     if not user:
         flash("This user is not on Twitter.")
@@ -300,7 +300,7 @@ def youtube():
     videos = getYoutubePosts(ids)
     if videos:
         videos.sort(key=lambda x: x.date, reverse=True)
-    print("--- {} seconds fetching youtube feed---".format(time.time() - start_time))
+    print(f"--- {time.time() - start_time} seconds fetching youtube feed---")
     return render_template('youtube.html', title="Yotter | Youtube", videos=videos, followCount=followCount,
                            config=config)

@@ -337,22 +337,21 @@ def ytsearch():
         filters = {"time": 0, "type": 0, "duration": 0}
         results = yts.search_by_terms(query, page, autocorrect, sort, filters)

-        next_page = "/ytsearch?q={q}&s={s}&p={p}".format(q=query, s=sort, p=int(page) + 1)
+        next_page = f"/ytsearch?q={query}&s={sort}&p={int(page)+1}"
         if int(page) == 1:
-            prev_page = "/ytsearch?q={q}&s={s}&p={p}".format(q=query, s=sort, p=1)
+            prev_page = f"/ytsearch?q={query}&s={sort}&p={1}"
         else:
-            prev_page = "/ytsearch?q={q}&s={s}&p={p}".format(q=query, s=sort, p=int(page) - 1)
+            prev_page = f"/ytsearch?q={query}&s={sort}&p={int(page)-1}"

         for video in results['videos']:
             hostname = urllib.parse.urlparse(video['videoThumb']).netloc
-            video['videoThumb'] = video['videoThumb'].replace("https://{}".format(hostname), "") + "&host=" + hostname
+            video['videoThumb'] = video['videoThumb'].replace(f"https://{hostname}", "") + "&host=" + hostname

         for channel in results['channels']:
             if config['isInstance']:
                 channel['thumbnail'] = channel['thumbnail'].replace("~", "/")
                 hostName = urllib.parse.urlparse(channel['thumbnail']).netloc
-                channel['thumbnail'] = channel['thumbnail'].replace("https://{}".format(hostName),
-                                                                    "") + "?host=" + hostName
+                channel['thumbnail'] = channel['thumbnail'].replace(f"https://{hostName}", "") + "?host=" + hostName
         return render_template('ytsearch.html', form=form, btform=button_form, results=results,
                                restricted=config['restrictPublicUsage'], config=config, npage=next_page,
                                ppage=prev_page)
@@ -380,7 +379,7 @@ def followYoutubeChannel(channelId):
             follow.followers.append(current_user)
             db.session.add(follow)
             db.session.commit()
-            flash("{} followed!".format(channelData['channel_name']))
+            flash(f"{channelData['channel_name']} followed!")
             return True
         else:
             return False
@@ -388,8 +387,8 @@ def followYoutubeChannel(channelId):
         print(e)
         return False
     except KeyError as ke:
-        print("KeyError: {}:'{}' could not be found".format(ke, channelId))
-        flash("Youtube: ChannelId '{}' is not valid".format(channelId))
+        print(f"KeyError: {ke}:'{channelId}' could not be found")
+        flash(f"Youtube: ChannelId '{channelId}' is not valid")
         return False


@@ -410,7 +409,7 @@ def unfollowYoutubeChannel(channelId):
         if channel:
             db.session.delete(channel)
             db.session.commit()
-            flash("{} unfollowed!".format(name))
+            flash(f"{name} unfollowed!")
     except:
         flash("There was an error unfollowing the user. Try again.")

@@ -435,22 +434,22 @@ def channel(id):
     for video in data['items']:
         if config['isInstance']:
             hostName = urllib.parse.urlparse(video['thumbnail'][1:]).netloc
-            video['thumbnail'] = video['thumbnail'].replace("https://{}".format(hostName), "")[1:].replace("hqdefault",
+            video['thumbnail'] = video['thumbnail'].replace(f"https://{hostName}", "")[1:].replace("hqdefault",
                                                             "mqdefault") + "&host=" + hostName
         else:
             video['thumbnail'] = video['thumbnail'].replace('/', '~')

     if config['isInstance']:
         hostName = urllib.parse.urlparse(data['avatar'][1:]).netloc
-        data['avatar'] = data['avatar'].replace("https://{}".format(hostName), "")[1:] + "?host=" + hostName
+        data['avatar'] = data['avatar'].replace(f"https://{hostName}", "")[1:] + "?host=" + hostName
     else:
         data['avatar'] = data['avatar'].replace('/', '~')

-    next_page = "/channel/{q}?s={s}&p={p}".format(q=id, s=sort, p=int(page) + 1)
+    next_page = f"/channel/{id}?s={sort}&p={int(page)+1}"
     if int(page) == 1:
-        prev_page = "/channel/{q}?s={s}&p={p}".format(q=id, s=sort, p=1)
+        prev_page = f"/channel/{id}?s={sort}&p={1}"
     else:
-        prev_page = "/channel/{q}?s={s}&p={p}".format(q=id, s=sort, p=int(page) - 1)
+        prev_page = f"/channel/{id}?s={sort}&p={int(page)-1}"

     return render_template('channel.html', form=form, btform=button_form, data=data,
                            restricted=config['restrictPublicUsage'], config=config, next_page=next_page,
@@ -488,11 +487,11 @@ def watch():
     if info['error'] == False:
         for format in info['formats']:
             hostName = urllib.parse.urlparse(format['url']).netloc
-            format['url'] = format['url'].replace("https://{}".format(hostName), "") + "&host=" + hostName
+            format['url'] = format['url'].replace(f"https://{hostName}", "") + "&host=" + hostName

         for format in info['audio_formats']:
             hostName = urllib.parse.urlparse(format['url']).netloc
-            format['url'] = format['url'].replace("https://{}".format(hostName), "") + "&host=" + hostName
+            format['url'] = format['url'].replace(f"https://{hostName}", "") + "&host=" + hostName

     # Markup description
     try:
@@ -804,7 +803,7 @@ def status():

 @app.route('/error/<errno>')
 def error(errno):
-    return render_template('{}.html'.format(str(errno)), config=config)
+    return render_template(f'{str(errno)}.html', config=config)


 def getTimeDiff(t):
@@ -812,24 +811,26 @@ def getTimeDiff(t):

     if diff.days == 0:
         if diff.seconds > 3599:
-            timeString = "{}h".format(int((diff.seconds / 60) / 60))
+            num = int((diff.seconds / 60) / 60)
+            timeString = f"{num}h"
         else:
-            timeString = "{}m".format(int(diff.seconds / 60))
+            num = int(diff.seconds / 60)
+            timeString = f"{num}m"
     else:
-        timeString = "{}d".format(diff.days)
+        timeString = f"{diff.days}d"
     return timeString


 def isTwitterUser(username):
-    response = requests.get('{instance}{user}/rss'.format(instance=NITTERINSTANCE, user=username))
+    response = requests.get(f'{NITTERINSTANCE}{username}/rss')
     if response.status_code == 404:
         return False
     return True


 def twitterUserSearch(terms):
-    response = urllib.request.urlopen(
-        '{instance}search?f=users&q={user}'.format(instance=NITTERINSTANCE, user=urllib.parse.quote(terms))).read()
+    url = f'{NITTERINSTANCE}search?f=users&q={urllib.parse.quote(terms)}'
+    response = urllib.request.urlopen(url).read()
     html = BeautifulSoup(str(response), "lxml")

     results = []
@@ -843,14 +844,14 @@ def twitterUserSearch(terms):
                 'unicode_escape').encode('latin_1').decode('utf8'),
             "username": item.find('a', attrs={'class': 'username'}).getText().encode('latin_1').decode(
                 'unicode_escape').encode('latin_1').decode('utf8'),
-            'avatar': "{i}{s}".format(i=NITTERINSTANCE, s=item.find('img', attrs={'class': 'avatar'})['src'][1:])
+            'avatar': NITTERINSTANCE + item.find('img', attrs={'class': 'avatar'})['src'][1:],
         }
         results.append(user)
     return results


 def getTwitterUserInfo(username):
-    response = urllib.request.urlopen('{instance}{user}'.format(instance=NITTERINSTANCE, user=username)).read()
+    response = urllib.request.urlopen(f'{NITTERINSTANCE}{username}').read()
     # rssFeed = feedparser.parse(response.content)

     html = BeautifulSoup(str(response), "lxml")
@@ -881,9 +882,7 @@ def getTwitterUserInfo(username):
         "followers": numerize.numerize(
             int(html.find_all('span', attrs={'class': 'profile-stat-num'})[2].string.replace(",", ""))),
         "likes": html.find_all('span', attrs={'class': 'profile-stat-num'})[3].string,
-        "profilePic": "{instance}{pic}".format(instance=NITTERINSTANCE,
-                                               pic=html.find('a', attrs={'class': 'profile-card-avatar'})['href'][
-                                                   1:])
+        "profilePic": NITTERINSTANCE + html.find('a', attrs={'class': 'profile-card-avatar'})['href'][1:],
     }
     return user

@@ -891,7 +890,7 @@ def getTwitterUserInfo(username):
 def getFeed(urls):
     feedPosts = []
     with FuturesSession() as session:
-        futures = [session.get('{instance}{user}'.format(instance=NITTERINSTANCE, user=u.username)) for u in urls]
+        futures = [session.get(f'{NITTERINSTANCE}{u.username}') for u in urls]
         for future in as_completed(futures):
             res= future.result().content
             html = BeautifulSoup(res, "html.parser")
@@ -960,7 +959,7 @@ def getPosts(account):
     feedPosts = []

     # Gather profile info.
-    rssFeed = urllib.request.urlopen('{instance}{user}'.format(instance=NITTERINSTANCE, user=account)).read()
+    rssFeed = urllib.request.urlopen(f'{NITTERINSTANCE}{account}').read()
     # Gather feedPosts
     res = rssFeed.decode('utf-8')
     html = BeautifulSoup(res, "html.parser")
@@ -1018,8 +1017,7 @@ def getPosts(account):
 def getYoutubePosts(ids):
     videos = []
     with FuturesSession() as session:
-        futures = [session.get('https://www.youtube.com/feeds/videos.xml?channel_id={id}'.format(id=id.channelId)) for
-                   id in ids]
+        futures = [session.get(f'https://www.youtube.com/feeds/videos.xml?channel_id={id.channelId}') for id in ids]
         for future in as_completed(futures):
             resp = future.result()
             rssFeed = feedparser.parse(resp.content)
@@ -1050,7 +1048,7 @@ def getYoutubePosts(ids):
                 video.timeStamp = getTimeDiff(vid.published_parsed)
             except:
                 if time != 0:
-                    video.timeStamp = "{} days".format(str(time.days))
+                    video.timeStamp = f"{str(time.days)} days"
                 else:
                     video.timeStamp = "Unknown"

@@ -1061,7 +1059,7 @@ def getYoutubePosts(ids):
             video.videoTitle = vid.title
             if config['isInstance']:
                 hostName = urllib.parse.urlparse(vid.media_thumbnail[0]['url']).netloc
-                video.videoThumb = vid.media_thumbnail[0]['url'].replace("https://{}".format(hostName), "").replace(
+                video.videoThumb = vid.media_thumbnail[0]['url'].replace(f"https://{hostName}", "").replace(
                     "hqdefault", "mqdefault") + "?host=" + hostName
             else:
                 video.videoThumb = vid.media_thumbnail[0]['url'].replace('/', '~')
@@ -22,7 +22,7 @@ def get_feed(usernames, daysMaxOld=10, includeRT=True):
     '''
     feedTweets = []
     with FuturesSession() as session:
-        futures = [session.get('{instance}{user}'.format(instance=config['nitterInstance'], user=u)) for u in usernames]
+        futures = [session.get(f'{config["nitterInstance"]}{u}') for u in usernames]
         for future in as_completed(futures):
            res = future.result().content.decode('utf-8')
            html = BeautifulSoup(res, "html.parser")
@@ -19,7 +19,7 @@ config = json.load(open('yotter-config.json'))
 config['nitterInstance']

 def get_user_info(username):
-    response = urllib.request.urlopen('{instance}{user}'.format(instance=config['nitterInstance'], user=username)).read()
+    response = urllib.request.urlopen(f'{config["nitterInstance"]}{username}').read()
     #rssFeed = feedparser.parse(response.content)

     html = BeautifulSoup(str(response), "lxml")
@@ -46,12 +46,12 @@ def get_user_info(username):
         "following":html.find_all('span', attrs={'class':'profile-stat-num'})[1].string,
         "followers":numerize.numerize(int(html.find_all('span', attrs={'class':'profile-stat-num'})[2].string.replace(",",""))),
         "likes":html.find_all('span', attrs={'class':'profile-stat-num'})[3].string,
-        "profilePic":"{instance}{pic}".format(instance=config['nitterInstance'], pic=html.find('a', attrs={'class':'profile-card-avatar'})['href'][1:])
+        "profilePic":config['nitterInstance'] + html.find('a', attrs={'class':'profile-card-avatar'})['href'][1:],
     }
     return user

 def get_tweets(user, page=1):
-    feed = urllib.request.urlopen('{instance}{user}'.format(instance=config['nitterInstance'], user=user)).read()
+    feed = urllib.request.urlopen(f'{config["nitterInstance"]}{user}').read()
     #Gather feedPosts
     res = feed.decode('utf-8')
     html = BeautifulSoup(res, "html.parser")
@@ -59,8 +59,9 @@ def get_tweets(user, page=1):

     if page == 2:
         nextPage = html.find('div', attrs={'class':'show-more'}).find('a')['href']
-        print('{instance}{user}{page}'.format(instance=config['nitterInstance'], user=user, page=nextPage))
-        feed = urllib.request.urlopen('{instance}{user}{page}'.format(instance=config['nitterInstance'], user=user, page=nextPage)).read()
+        url = f'{config["nitterInstance"]}{user}{nextPage}'
+        print(url)
+        feed = urllib.request.urlopen(url).read()
         res = feed.decode('utf-8')
         html = BeautifulSoup(res, "html.parser")
     feedPosts = get_feed_tweets(html)
@@ -258,5 +258,3 @@ def get_channel_page_general_url(base_url, tab, request, channel_id=None):
         parameters_dictionary = request.args,
         **info
     )
-
-
@@ -155,13 +155,13 @@ def get_info_grid_video_item(item, channel=None):
         'timeStamp':published,
         'duration':duration,
         'channelName':channel['username'],
-        'authorUrl':"/channel/{}".format(channel['channelId']),
+        'authorUrl':f"/channel/{channel['channelId']}",
         'channelId':channel['channelId'],
         'id':item['videoId'],
-        'videoUrl':"/watch?v={}".format(item['videoId']),
+        'videoUrl':f"/watch?v={item['videoId']}",
         'isLive':isLive,
         'isUpcoming':isUpcoming,
-        'videoThumb':item['thumbnail']['thumbnails'][0]['url']
+        'videoThumb':item['thumbnail']['thumbnails'][0]['url'],
     }
     return video

@@ -172,18 +172,18 @@ def get_author_info_from_channel(content):
     channel = {
         "channelId": cmd['channelId'],
         "username": cmd['title'],
-        "thumbnail": "https:{}".format(cmd['avatar']['thumbnails'][0]['url'].replace("/", "~")),
+        "thumbnail": f"https:{cmd['avatar']['thumbnails'][0]['url'].replace('/', '~')}",
         "description":description,
         "suscribers": cmd['subscriberCountText']['runs'][0]['text'].split(" ")[0],
-        "banner": cmd['banner']['thumbnails'][0]['url']
+        "banner": cmd['banner']['thumbnails'][0]['url'],
     }
     return channel

 def get_channel_info(channelId, videos=True, page=1, sort=3):
     if id_or_username(channelId) == "channel":
         videos = []
-        ciUrl = "https://www.youtube.com/channel/{}".format(channelId)
-        mainUrl = "https://www.youtube.com/browse_ajax?ctoken={}".format(channel_ctoken_desktop(channelId, page, sort, "videos"))
+        ciUrl = f"https://www.youtube.com/channel/{channelId}"
+        mainUrl = f"https://www.youtube.com/browse_ajax?ctoken={channel_ctoken_desktop(channelId, page, sort, 'videos')}"
         content = json.loads(requests.get(mainUrl, headers=headers).text)
         req = requests.get(ciUrl, headers=headers).text

@@ -210,4 +210,4 @@ def get_channel_info(channelId, videos=True, page=1, sort=3):
         return {"channel":authorInfo}

     else:
-        baseUrl = "https://www.youtube.com/user/{}".format(channelId)
+        baseUrl = f"https://www.youtube.com/user/{channelId}"
@@ -159,10 +159,9 @@ def get_video_renderer_item_info(item):
         'authorUrl':"/channel/{}".format(item['ownerText']['runs'][0]['navigationEndpoint']['browseEndpoint']['browseId']),
         'channelId':item['ownerText']['runs'][0]['navigationEndpoint']['browseEndpoint']['browseId'],
         'id':item['videoId'],
-        'videoUrl':"/watch?v={}".format(item['videoId']),
+        'videoUrl':f"/watch?v={item['videoId']}",
         'isLive':isLive,
         'isUpcoming':isUpcoming,
-        'videoThumb':item['thumbnail']['thumbnails'][0]['url']
+        'videoThumb':item['thumbnail']['thumbnails'][0]['url'],
     }
     return video
-
@@ -394,4 +394,3 @@ def check_gevent_exceptions(*tasks):
     for task in tasks:
         if task.exception:
             raise task.exception
-
@@ -29,7 +29,7 @@ def parse_comment(raw_comment):
     cmnt = {}
     imgHostName = urllib.parse.urlparse(raw_comment['author_avatar'][1:]).netloc
     cmnt['author'] = raw_comment['author']
-    cmnt['thumbnail'] = raw_comment['author_avatar'].replace("https://{}".format(imgHostName),"")[1:] + "?host=" + imgHostName
+    cmnt['thumbnail'] = raw_comment['author_avatar'].replace(f"https://{imgHostName}","")[1:] + "?host=" + imgHostName

     print(cmnt['thumbnail'])
     cmnt['channel'] = raw_comment['author_url']
@@ -266,5 +266,3 @@ def format_bytes(bytes):
     suffix = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB'][exponent]
     converted = float(bytes) / float(1024 ** exponent)
     return '%.2f%s' % (converted, suffix)
-
-