commit 4bc5b8d33f
Merge pull request #254 from TeamNewPipe/formatting

Improve code formatting and optimize imports
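The commit applies one set of conventions across the extractor code: a space after if/for/while/catch keywords, @Nullable annotations on their own line, single-line method bodies expanded onto several lines, and import lists reordered with unused entries removed. A minimal hypothetical sketch of the resulting style (not a file from this repository; the class and field names are invented for illustration, and javax.annotation.Nullable is assumed to come from the JSR-305 annotations the project already depends on):

    import javax.annotation.Nullable;

    public class StyleExample {
        // annotation on its own line instead of "@Nullable private String name;"
        @Nullable
        private String name = null;

        public StyleExample(final String name) {
            // space between the keyword and the parenthesis: "if (" rather than "if("
            if (name == null) throw new NullPointerException("name is null");
            this.name = name;
        }

        // single-line bodies such as "{ return ""; }" are expanded
        public String getSortFilter(String url) {
            return "";
        }
    }

The unified diff below shows these conventions applied file by file.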
@@ -1,10 +1,5 @@
 package org.schabi.newpipe.extractor;
 
-import java.io.IOException;
-
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
-
 import org.schabi.newpipe.extractor.downloader.Downloader;
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
@@ -13,7 +8,11 @@ import org.schabi.newpipe.extractor.localization.ContentCountry;
 import org.schabi.newpipe.extractor.localization.Localization;
 import org.schabi.newpipe.extractor.localization.TimeAgoParser;
 
-public abstract class Extractor{
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+import java.io.IOException;
+
+public abstract class Extractor {
     /**
      * {@link StreamingService} currently related to this extractor.<br>
      * Useful for getting other things from a service (like the url handlers for cleaning/accepting/get id from urls).
@@ -21,19 +20,21 @@ public abstract class Extractor{
     private final StreamingService service;
     private final LinkHandler linkHandler;
 
-    @Nullable private Localization forcedLocalization = null;
-    @Nullable private ContentCountry forcedContentCountry = null;
+    @Nullable
+    private Localization forcedLocalization = null;
+    @Nullable
+    private ContentCountry forcedContentCountry = null;
 
     private boolean pageFetched = false;
     private final Downloader downloader;
 
     public Extractor(final StreamingService service, final LinkHandler linkHandler) {
-        if(service == null) throw new NullPointerException("service is null");
-        if(linkHandler == null) throw new NullPointerException("LinkHandler is null");
+        if (service == null) throw new NullPointerException("service is null");
+        if (linkHandler == null) throw new NullPointerException("LinkHandler is null");
         this.service = service;
         this.linkHandler = linkHandler;
         this.downloader = NewPipe.getDownloader();
-        if(downloader == null) throw new NullPointerException("downloader is null");
+        if (downloader == null) throw new NullPointerException("downloader is null");
     }
 
     /**
@@ -46,11 +47,12 @@ public abstract class Extractor{
 
     /**
      * Fetch the current page.
+     *
      * @throws IOException if the page can not be loaded
      * @throws ExtractionException if the pages content is not understood
      */
     public void fetchPage() throws IOException, ExtractionException {
-        if(pageFetched) return;
+        if (pageFetched) return;
         onFetchPage(downloader);
         pageFetched = true;
     }
@@ -65,6 +67,7 @@ public abstract class Extractor{
 
     /**
      * Fetch the current page.
+     *
      * @param downloader the download to use
      * @throws IOException if the page can not be loaded
      * @throws ExtractionException if the pages content is not understood
@@ -78,6 +81,7 @@ public abstract class Extractor{
 
     /**
      * Get the name
+     *
      * @return the name
      * @throws ParsingException if the name cannot be extracted
      */
@@ -27,7 +27,7 @@ import java.util.List;
  * along with NewPipe. If not, see <http://www.gnu.org/licenses/>.
  */
 
-public abstract class InfoItemsCollector<I extends InfoItem, E extends InfoItemExtractor> implements Collector<I,E> {
+public abstract class InfoItemsCollector<I extends InfoItem, E extends InfoItemExtractor> implements Collector<I, E> {
 
     private final List<I> itemList = new ArrayList<>();
     private final List<Throwable> errors = new ArrayList<>();
@@ -115,19 +115,20 @@ public enum MediaFormat {
     }
 
     /**
-     * Get the media format by it's id.
+     * Get the media format by its id.
+     *
      * @param id the id
      * @return the id of the media format or null.
      */
     public static MediaFormat getFormatById(int id) {
-        for (MediaFormat vf: values()) {
+        for (MediaFormat vf : values()) {
             if (vf.id == id) return vf;
         }
         return null;
     }
 
     public static MediaFormat getFromSuffix(String suffix) {
-        for (MediaFormat vf: values()) {
+        for (MediaFormat vf : values()) {
             if (vf.suffix.equals(suffix)) return vf;
         }
         return null;
@@ -135,6 +136,7 @@ public enum MediaFormat {
 
     /**
      * Get the name of the format
+     *
      * @return the name of the format
      */
     public String getName() {
@@ -143,6 +145,7 @@ public enum MediaFormat {
 
     /**
      * Get the filename extension
+     *
      * @return the filename extension
      */
     public String getSuffix() {
@@ -151,6 +154,7 @@ public enum MediaFormat {
 
     /**
      * Get the mime type
+     *
      * @return the mime type
      */
     public String getMimeType() {
@@ -1,14 +1,14 @@
 package org.schabi.newpipe.extractor;
 
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
 import org.schabi.newpipe.extractor.services.media_ccc.MediaCCCService;
 import org.schabi.newpipe.extractor.services.peertube.PeertubeService;
 import org.schabi.newpipe.extractor.services.soundcloud.SoundcloudService;
 import org.schabi.newpipe.extractor.services.youtube.YoutubeService;
 
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
 /*
  * Copyright (C) Christian Schabesberger 2018 <chris.schabesberger@mailbox.org>
  * ServiceList.java is part of NewPipe.
@@ -1,20 +1,12 @@
 package org.schabi.newpipe.extractor;
 
-import java.util.Collections;
-import java.util.List;
-
 import org.schabi.newpipe.extractor.channel.ChannelExtractor;
 import org.schabi.newpipe.extractor.comments.CommentsExtractor;
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.feed.FeedExtractor;
 import org.schabi.newpipe.extractor.kiosk.KioskList;
-import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
-import org.schabi.newpipe.extractor.linkhandler.LinkHandlerFactory;
-import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
-import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
-import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
-import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandlerFactory;
+import org.schabi.newpipe.extractor.linkhandler.*;
 import org.schabi.newpipe.extractor.localization.ContentCountry;
 import org.schabi.newpipe.extractor.localization.Localization;
 import org.schabi.newpipe.extractor.localization.TimeAgoParser;
@@ -26,6 +18,8 @@ import org.schabi.newpipe.extractor.subscription.SubscriptionExtractor;
 import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;
 
 import javax.annotation.Nullable;
+import java.util.Collections;
+import java.util.List;
 
 /*
  * Copyright (C) Christian Schabesberger 2018 <chris.schabesberger@mailbox.org>
@@ -269,7 +263,7 @@ public abstract class StreamingService {
 
     public CommentsExtractor getCommentsExtractor(String url) throws ExtractionException {
         ListLinkHandlerFactory llhf = getCommentsLHFactory();
-        if(null == llhf) {
+        if (llhf == null) {
             return null;
         }
         return getCommentsExtractor(llhf.fromUrl(url));
@@ -1,7 +1,5 @@
 package org.schabi.newpipe.extractor.comments;
 
-import java.io.IOException;
-
 import org.schabi.newpipe.extractor.ListExtractor.InfoItemsPage;
 import org.schabi.newpipe.extractor.ListInfo;
 import org.schabi.newpipe.extractor.NewPipe;
@@ -10,11 +8,12 @@ import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
 import org.schabi.newpipe.extractor.utils.ExtractorHelper;
 
-public class CommentsInfo extends ListInfo<CommentsInfoItem>{
+import java.io.IOException;
+
+public class CommentsInfo extends ListInfo<CommentsInfoItem> {
 
     private CommentsInfo(int serviceId, ListLinkHandler listUrlIdHandler, String name) {
         super(serviceId, listUrlIdHandler, name);
-        // TODO Auto-generated constructor stub
     }
 
     public static CommentsInfo getInfo(String url) throws IOException, ExtractionException {
@@ -25,7 +25,6 @@ import org.schabi.newpipe.extractor.ListExtractor;
 import org.schabi.newpipe.extractor.StreamingService;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
-import org.schabi.newpipe.extractor.stream.StreamInfoItem;
 
 import javax.annotation.Nonnull;
 
@@ -26,14 +26,17 @@ public class KioskList {
     private final HashMap<String, KioskEntry> kioskList = new HashMap<>();
     private String defaultKiosk = null;
 
-    @Nullable private Localization forcedLocalization;
-    @Nullable private ContentCountry forcedContentCountry;
+    @Nullable
+    private Localization forcedLocalization;
+    @Nullable
+    private ContentCountry forcedContentCountry;
+
     private class KioskEntry {
         public KioskEntry(KioskExtractorFactory ef, ListLinkHandlerFactory h) {
             extractorFactory = ef;
             handlerFactory = h;
         }
 
         final KioskExtractorFactory extractorFactory;
         final ListLinkHandlerFactory handlerFactory;
     }
@@ -44,7 +47,7 @@ public class KioskList {
 
     public void addKioskEntry(KioskExtractorFactory extractorFactory, ListLinkHandlerFactory handlerFactory, String id)
             throws Exception {
-        if(kioskList.get(id) != null) {
+        if (kioskList.get(id) != null) {
             throw new Exception("Kiosk with type " + id + " already exists.");
         }
         kioskList.put(id, new KioskEntry(extractorFactory, handlerFactory));
@@ -66,10 +69,10 @@ public class KioskList {
 
     public KioskExtractor getDefaultKioskExtractor(String nextPageUrl, Localization localization)
             throws ExtractionException, IOException {
-        if(defaultKiosk != null && !defaultKiosk.equals("")) {
+        if (defaultKiosk != null && !defaultKiosk.equals("")) {
             return getExtractorById(defaultKiosk, nextPageUrl, localization);
         } else {
-            if(!kioskList.isEmpty()) {
+            if (!kioskList.isEmpty()) {
                 // if not set get any entry
                 Object[] keySet = kioskList.keySet().toArray();
                 return getExtractorById(keySet[0].toString(), nextPageUrl, localization);
@@ -91,7 +94,7 @@ public class KioskList {
     public KioskExtractor getExtractorById(String kioskId, String nextPageUrl, Localization localization)
             throws ExtractionException, IOException {
         KioskEntry ke = kioskList.get(kioskId);
-        if(ke == null) {
+        if (ke == null) {
             throw new ExtractionException("No kiosk found with the type: " + kioskId);
         } else {
             final KioskExtractor kioskExtractor = ke.extractorFactory.createNewKiosk(service,
@@ -109,15 +112,15 @@ public class KioskList {
     }
 
     public KioskExtractor getExtractorByUrl(String url, String nextPageUrl)
-            throws ExtractionException, IOException{
+            throws ExtractionException, IOException {
         return getExtractorByUrl(url, nextPageUrl, NewPipe.getPreferredLocalization());
     }
 
     public KioskExtractor getExtractorByUrl(String url, String nextPageUrl, Localization localization)
             throws ExtractionException, IOException {
-        for(Map.Entry<String, KioskEntry> e : kioskList.entrySet()) {
+        for (Map.Entry<String, KioskEntry> e : kioskList.entrySet()) {
             KioskEntry ke = e.getValue();
-            if(ke.handlerFactory.acceptUrl(url)) {
+            if (ke.handlerFactory.acceptUrl(url)) {
                 return getExtractorById(ke.handlerFactory.getId(url), nextPageUrl, localization);
             }
         }
@@ -1,10 +1,10 @@
 package org.schabi.newpipe.extractor.linkhandler;
 
-import java.io.Serializable;
-
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.utils.Utils;
 
+import java.io.Serializable;
+
 public class LinkHandler implements Serializable {
     protected final String originalUrl;
     protected final String url;
@@ -55,7 +55,7 @@ public abstract class LinkHandlerFactory {
         }
 
         final String id = getId(url);
-        return new LinkHandler(url, getUrl(id,baseUrl), id);
+        return new LinkHandler(url, getUrl(id, baseUrl), id);
     }
 
     public LinkHandler fromId(String id) throws ParsingException {
@@ -1,19 +1,25 @@
 package org.schabi.newpipe.extractor.linkhandler;
 
-import java.util.ArrayList;
-import java.util.List;
-
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.utils.Utils;
 
+import java.util.ArrayList;
+import java.util.List;
+
 public abstract class ListLinkHandlerFactory extends LinkHandlerFactory {
 
     ///////////////////////////////////
     // To Override
     ///////////////////////////////////
 
-    public List<String> getContentFilter(String url) throws ParsingException { return new ArrayList<>(0);}
-    public String getSortFilter(String url) throws ParsingException {return ""; }
+    public List<String> getContentFilter(String url) throws ParsingException {
+        return new ArrayList<>(0);
+    }
+
+    public String getSortFilter(String url) throws ParsingException {
+        return "";
+    }
 
     public abstract String getUrl(String id, List<String> contentFilter, String sortFilter) throws ParsingException;
 
     public String getUrl(String id, List<String> contentFilter, String sortFilter, String baseUrl) throws ParsingException {
@@ -32,7 +38,7 @@ public abstract class ListLinkHandlerFactory extends LinkHandlerFactory {
 
     @Override
     public ListLinkHandler fromUrl(String url, String baseUrl) throws ParsingException {
-        if(url == null) throw new IllegalArgumentException("url may not be null");
+        if (url == null) throw new IllegalArgumentException("url may not be null");
 
         return new ListLinkHandler(super.fromUrl(url, baseUrl), getContentFilter(url), getSortFilter(url));
     }
@@ -63,8 +69,9 @@ public abstract class ListLinkHandlerFactory extends LinkHandlerFactory {
 
 
     /**
-     * For makeing ListLinkHandlerFactory compatible with LinkHandlerFactory we need to override this,
+     * For making ListLinkHandlerFactory compatible with LinkHandlerFactory we need to override this,
      * however it should not be overridden by the actual implementation.
+     *
      * @param id
      * @return the url coresponding to id without any filters applied
      */
@@ -24,6 +24,7 @@ public class SearchQueryHandler extends ListLinkHandler {
     /**
      * Returns the search string. Since ListQIHandler is based on ListLinkHandler
      * getSearchString() is equivalent to calling getId().
+     *
      * @return the search string
      */
     public String getSearchString() {
@@ -13,14 +13,19 @@ public abstract class SearchQueryHandlerFactory extends ListLinkHandlerFactory {
 
     @Override
     public abstract String getUrl(String querry, List<String> contentFilter, String sortFilter) throws ParsingException;
-    public String getSearchString(String url) { return "";}
+
+    public String getSearchString(String url) {
+        return "";
+    }
 
     ///////////////////////////////////
     // Logic
     ///////////////////////////////////
 
     @Override
-    public String getId(String url) { return getSearchString(url); }
+    public String getId(String url) {
+        return getSearchString(url);
+    }
 
     @Override
     public SearchQueryHandler fromQuery(String querry,
@@ -34,10 +39,13 @@ public abstract class SearchQueryHandlerFactory extends ListLinkHandlerFactory {
     }
 
     /**
-     * It's not mandatorry for NewPipe to handle the Url
+     * It's not mandatory for NewPipe to handle the Url
+     *
      * @param url
      * @return
     */
     @Override
-    public boolean onAcceptUrl(String url) { return false; }
+    public boolean onAcceptUrl(String url) {
+        return false;
+    }
 }
@@ -3,7 +3,10 @@ package org.schabi.newpipe.extractor.localization;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 import java.io.Serializable;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Locale;
 
 public class Localization implements Serializable {
     public static final Localization DEFAULT = new Localization("en", "GB");
@@ -23,6 +23,7 @@ public class TimeAgoParser {
      * <p>
      * Instantiate a new {@link TimeAgoParser} every time you extract a new batch of items.
      * </p>
+     *
      * @param patternsHolder An object that holds the "time ago" patterns, special cases, and the language word separator.
      */
     public TimeAgoParser(PatternsHolder patternsHolder) {
@@ -164,6 +165,7 @@ public class TimeAgoParser {
 
     /**
      * Marks the time as approximated by setting minutes, seconds and milliseconds to 0.
+     *
      * @param calendarTime Time to be marked as approximated
      */
     private void markApproximatedTime(Calendar calendarTime) {
@@ -5,7 +5,6 @@ import org.schabi.newpipe.extractor.InfoItemExtractor;
 import org.schabi.newpipe.extractor.InfoItemsCollector;
 import org.schabi.newpipe.extractor.channel.ChannelInfoItemExtractor;
 import org.schabi.newpipe.extractor.channel.ChannelInfoItemsCollector;
-import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.playlist.PlaylistInfoItemExtractor;
 import org.schabi.newpipe.extractor.playlist.PlaylistInfoItemsCollector;
@@ -34,7 +33,7 @@ import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
 
 /**
  * Collector for search results
- *
+ * <p>
  * This collector can handle the following extractor types:
  * <ul>
  * <li>{@link StreamInfoItemExtractor}</li>
@@ -59,11 +58,11 @@ public class InfoItemsSearchCollector extends InfoItemsCollector<InfoItem, InfoI
     @Override
     public InfoItem extract(InfoItemExtractor extractor) throws ParsingException {
         // Use the corresponding collector for each item extractor type
-        if(extractor instanceof StreamInfoItemExtractor) {
+        if (extractor instanceof StreamInfoItemExtractor) {
             return streamCollector.extract((StreamInfoItemExtractor) extractor);
-        } else if(extractor instanceof ChannelInfoItemExtractor) {
+        } else if (extractor instanceof ChannelInfoItemExtractor) {
             return userCollector.extract((ChannelInfoItemExtractor) extractor);
-        } else if(extractor instanceof PlaylistInfoItemExtractor) {
+        } else if (extractor instanceof PlaylistInfoItemExtractor) {
             return playlistCollector.extract((PlaylistInfoItemExtractor) extractor);
         } else {
             throw new IllegalArgumentException("Invalid extractor type: " + extractor);
@@ -55,7 +55,7 @@ public class MediaCCCConferenceExtractor extends ChannelExtractor {
     public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
         StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
         JsonArray events = conferenceData.getArray("events");
-        for(int i = 0; i < events.size(); i++) {
+        for (int i = 0; i < events.size(); i++) {
             collector.commit(new MediaCCCStreamInfoItemExtractor(events.getObject(i)));
         }
         return new InfoItemsPage<>(collector, null);
@@ -32,7 +32,7 @@ public class MediaCCCConferenceKiosk extends KioskExtractor<ChannelInfoItem> {
     public InfoItemsPage<ChannelInfoItem> getInitialPage() throws IOException, ExtractionException {
         JsonArray conferences = doc.getArray("conferences");
         ChannelInfoItemsCollector collector = new ChannelInfoItemsCollector(getServiceId());
-        for(int i = 0; i < conferences.size(); i++) {
+        for (int i = 0; i < conferences.size(); i++) {
             collector.commit(new MediaCCCConferenceInfoItemExtractor(conferences.getObject(i)));
         }
 
@@ -50,7 +50,7 @@ public class MediaCCCSearchExtractor extends SearchExtractor {
         InfoItemsSearchCollector searchItems = getInfoItemSearchCollector();
         searchItems.reset();
 
-        if(getLinkHandler().getContentFilters().contains(CONFERENCES)
+        if (getLinkHandler().getContentFilters().contains(CONFERENCES)
                 || getLinkHandler().getContentFilters().contains(ALL)
                 || getLinkHandler().getContentFilters().isEmpty()) {
             searchConferences(getSearchString(),
@@ -58,7 +58,7 @@ public class MediaCCCSearchExtractor extends SearchExtractor {
                     searchItems);
         }
 
-        if(getLinkHandler().getContentFilters().contains(EVENTS)
+        if (getLinkHandler().getContentFilters().contains(EVENTS)
                 || getLinkHandler().getContentFilters().contains(ALL)
                 || getLinkHandler().getContentFilters().isEmpty()) {
             JsonArray events = doc.getArray("events");
@@ -82,7 +82,7 @@ public class MediaCCCSearchExtractor extends SearchExtractor {
 
     @Override
     public void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException {
-        if(getLinkHandler().getContentFilters().contains(EVENTS)
+        if (getLinkHandler().getContentFilters().contains(EVENTS)
                 || getLinkHandler().getContentFilters().contains(ALL)
                 || getLinkHandler().getContentFilters().isEmpty()) {
             final String site;
@@ -94,7 +94,7 @@ public class MediaCCCSearchExtractor extends SearchExtractor {
                 throw new ExtractionException("Could not parse json.", jpe);
             }
         }
-        if(getLinkHandler().getContentFilters().contains(CONFERENCES)
+        if (getLinkHandler().getContentFilters().contains(CONFERENCES)
                 || getLinkHandler().getContentFilters().contains(ALL)
                 || getLinkHandler().getContentFilters().isEmpty())
             conferenceKiosk.fetchPage();
@@ -103,8 +103,8 @@ public class MediaCCCSearchExtractor extends SearchExtractor {
     private void searchConferences(String searchString,
                                    List<ChannelInfoItem> channelItems,
                                    InfoItemsSearchCollector collector) {
-        for(final ChannelInfoItem item : channelItems) {
-            if(item.getName().toUpperCase().contains(
+        for (final ChannelInfoItem item : channelItems) {
+            if (item.getName().toUpperCase().contains(
                     searchString.toUpperCase())) {
                 collector.commit(new ChannelInfoItemExtractor() {
                     @Override
@@ -117,17 +117,17 @@ public class MediaCCCStreamExtractor extends StreamExtractor {
     public List<AudioStream> getAudioStreams() throws IOException, ExtractionException {
         final JsonArray recordings = data.getArray("recordings");
         final List<AudioStream> audioStreams = new ArrayList<>();
-        for(int i = 0; i < recordings.size(); i++) {
+        for (int i = 0; i < recordings.size(); i++) {
             final JsonObject recording = recordings.getObject(i);
             final String mimeType = recording.getString("mime_type");
-            if(mimeType.startsWith("audio")) {
+            if (mimeType.startsWith("audio")) {
                 //first we need to resolve the actual video data from CDN
                 final MediaFormat mediaFormat;
-                if(mimeType.endsWith("opus")) {
+                if (mimeType.endsWith("opus")) {
                     mediaFormat = MediaFormat.OPUS;
-                } else if(mimeType.endsWith("mpeg")) {
+                } else if (mimeType.endsWith("mpeg")) {
                     mediaFormat = MediaFormat.MP3;
-                } else if(mimeType.endsWith("ogg")){
+                } else if (mimeType.endsWith("ogg")) {
                     mediaFormat = MediaFormat.OGG;
                 } else {
                     throw new ExtractionException("Unknown media format: " + mimeType);
@@ -143,16 +143,16 @@ public class MediaCCCStreamExtractor extends StreamExtractor {
     public List<VideoStream> getVideoStreams() throws IOException, ExtractionException {
         final JsonArray recordings = data.getArray("recordings");
         final List<VideoStream> videoStreams = new ArrayList<>();
-        for(int i = 0; i < recordings.size(); i++) {
+        for (int i = 0; i < recordings.size(); i++) {
             final JsonObject recording = recordings.getObject(i);
             final String mimeType = recording.getString("mime_type");
-            if(mimeType.startsWith("video")) {
+            if (mimeType.startsWith("video")) {
                 //first we need to resolve the actual video data from CDN
 
                 final MediaFormat mediaFormat;
-                if(mimeType.endsWith("webm")) {
+                if (mimeType.endsWith("webm")) {
                     mediaFormat = MediaFormat.WEBM;
-                } else if(mimeType.endsWith("mp4")) {
+                } else if (mimeType.endsWith("mp4")) {
                     mediaFormat = MediaFormat.MPEG_4;
                 } else {
                     throw new ExtractionException("Unknown media format: " + mimeType);
@@ -15,9 +15,9 @@ public class MediaCCCConferenceLinkHandlerFactory extends ListLinkHandlerFactory
 
     @Override
     public String getId(String url) throws ParsingException {
-        if(url.startsWith("https://api.media.ccc.de/public/conferences/")) {
+        if (url.startsWith("https://api.media.ccc.de/public/conferences/")) {
             return url.replace("https://api.media.ccc.de/public/conferences/", "");
-        } else if(url.startsWith("https://media.ccc.de/c/")) {
+        } else if (url.startsWith("https://media.ccc.de/c/")) {
             return Parser.matchGroup1("https://media.ccc.de/c/([^?#]*)", url);
         } else {
             throw new ParsingException("Could not get id from url: " + url);
@@ -1,7 +1,6 @@
 package org.schabi.newpipe.extractor.services.media_ccc.linkHandler;
 
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
-import org.schabi.newpipe.extractor.linkhandler.LinkHandlerFactory;
 import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
 
 import java.util.List;
@@ -15,7 +15,7 @@ public class MediaCCCSearchQueryHandlerFactory extends SearchQueryHandlerFactory
 
     @Override
     public String[] getAvailableContentFilter() {
-        return new String[] {
+        return new String[]{
                 ALL,
                 CONFERENCES,
                 EVENTS
@@ -7,7 +7,7 @@ public class MediaCCCStreamLinkHandlerFactory extends LinkHandlerFactory {
 
     @Override
     public String getId(String url) throws ParsingException {
-        if(url.startsWith("https://api.media.ccc.de/public/events/") &&
+        if (url.startsWith("https://api.media.ccc.de/public/events/") &&
                 !url.contains("?q=")) {
             return url.replace("https://api.media.ccc.de/public/events/", "");
         }
@@ -1,7 +1,8 @@
 package org.schabi.newpipe.extractor.services.peertube;
 
-import java.io.IOException;
-
+import com.grack.nanojson.JsonObject;
+import com.grack.nanojson.JsonParser;
+import com.grack.nanojson.JsonParserException;
 import org.jsoup.helper.StringUtil;
 import org.schabi.newpipe.extractor.NewPipe;
 import org.schabi.newpipe.extractor.downloader.Downloader;
@@ -10,9 +11,7 @@ import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.exceptions.ReCaptchaException;
 import org.schabi.newpipe.extractor.utils.JsonUtils;
 
-import com.grack.nanojson.JsonObject;
-import com.grack.nanojson.JsonParser;
-import com.grack.nanojson.JsonParserException;
+import java.io.IOException;
 
 public class PeertubeInstance {
 
@@ -25,7 +24,7 @@ public class PeertubeInstance {
         this.name = "PeerTube";
     }
 
-    public PeertubeInstance(String url , String name) {
+    public PeertubeInstance(String url, String name) {
         this.url = url;
         this.name = name;
     }
@@ -44,7 +43,7 @@ public class PeertubeInstance {
             throw new Exception("unable to configure instance " + url, e);
         }
 
-        if(null == response || StringUtil.isBlank(response.responseBody())) {
+        if (response == null || StringUtil.isBlank(response.responseBody())) {
             throw new Exception("unable to configure instance " + url);
         }
 
@@ -1,17 +1,16 @@
 package org.schabi.newpipe.extractor.services.peertube;
 
+import com.grack.nanojson.JsonObject;
+import org.jsoup.helper.StringUtil;
+import org.schabi.newpipe.extractor.exceptions.ContentNotAvailableException;
+import org.schabi.newpipe.extractor.exceptions.ParsingException;
+
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
 import java.util.Date;
 import java.util.TimeZone;
 
-import org.jsoup.helper.StringUtil;
-import org.schabi.newpipe.extractor.exceptions.ContentNotAvailableException;
-import org.schabi.newpipe.extractor.exceptions.ParsingException;
-
-import com.grack.nanojson.JsonObject;
-
 public class PeertubeParsingHelper {
 
     private PeertubeParsingHelper() {
@@ -1,38 +1,24 @@
 package org.schabi.newpipe.extractor.services.peertube;
 
-import static java.util.Arrays.asList;
-import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.COMMENTS;
-import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.VIDEO;
-
 import org.schabi.newpipe.extractor.StreamingService;
 import org.schabi.newpipe.extractor.channel.ChannelExtractor;
 import org.schabi.newpipe.extractor.comments.CommentsExtractor;
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.kiosk.KioskExtractor;
 import org.schabi.newpipe.extractor.kiosk.KioskList;
-import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
-import org.schabi.newpipe.extractor.linkhandler.LinkHandlerFactory;
-import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
-import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
-import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
-import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandlerFactory;
+import org.schabi.newpipe.extractor.linkhandler.*;
 import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
 import org.schabi.newpipe.extractor.search.SearchExtractor;
-import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeChannelExtractor;
-import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeCommentsExtractor;
-import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeSearchExtractor;
-import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeStreamExtractor;
-import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeSuggestionExtractor;
-import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeTrendingExtractor;
-import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeChannelLinkHandlerFactory;
-import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeCommentsLinkHandlerFactory;
-import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeSearchQueryHandlerFactory;
-import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeStreamLinkHandlerFactory;
-import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeTrendingLinkHandlerFactory;
+import org.schabi.newpipe.extractor.services.peertube.extractors.*;
+import org.schabi.newpipe.extractor.services.peertube.linkHandler.*;
 import org.schabi.newpipe.extractor.stream.StreamExtractor;
 import org.schabi.newpipe.extractor.subscription.SubscriptionExtractor;
 import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;
 
+import static java.util.Arrays.asList;
+import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.COMMENTS;
+import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.VIDEO;
+
 public class PeertubeService extends StreamingService {
 
     private PeertubeInstance instance;
@ -1,7 +1,9 @@
|
|||||||
package org.schabi.newpipe.extractor.services.peertube.extractors;
|
package org.schabi.newpipe.extractor.services.peertube.extractors;
|
||||||
|
|
||||||
import java.io.IOException;
|
import com.grack.nanojson.JsonArray;
|
||||||
|
import com.grack.nanojson.JsonObject;
|
||||||
|
import com.grack.nanojson.JsonParser;
|
||||||
|
import com.grack.nanojson.JsonParserException;
|
||||||
import org.jsoup.helper.StringUtil;
|
import org.jsoup.helper.StringUtil;
|
||||||
import org.schabi.newpipe.extractor.StreamingService;
|
import org.schabi.newpipe.extractor.StreamingService;
|
||||||
import org.schabi.newpipe.extractor.channel.ChannelExtractor;
|
import org.schabi.newpipe.extractor.channel.ChannelExtractor;
|
||||||
@ -17,10 +19,7 @@ import org.schabi.newpipe.extractor.utils.JsonUtils;
|
|||||||
import org.schabi.newpipe.extractor.utils.Parser;
|
import org.schabi.newpipe.extractor.utils.Parser;
|
||||||
import org.schabi.newpipe.extractor.utils.Parser.RegexException;
|
import org.schabi.newpipe.extractor.utils.Parser.RegexException;
|
||||||
|
|
||||||
import com.grack.nanojson.JsonArray;
|
import java.io.IOException;
|
||||||
import com.grack.nanojson.JsonObject;
|
|
||||||
import com.grack.nanojson.JsonParser;
|
|
||||||
import com.grack.nanojson.JsonParserException;
|
|
||||||
|
|
||||||
public class PeertubeChannelExtractor extends ChannelExtractor {
|
public class PeertubeChannelExtractor extends ChannelExtractor {
|
||||||
|
|
||||||
@ -45,7 +44,7 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
|
|||||||
String value;
|
String value;
|
||||||
try {
|
try {
|
||||||
value = JsonUtils.getString(json, "avatar.path");
|
value = JsonUtils.getString(json, "avatar.path");
|
||||||
}catch(Exception e) {
|
} catch (Exception e) {
|
||||||
value = "/client/assets/images/default-avatar.png";
|
value = "/client/assets/images/default-avatar.png";
|
||||||
}
|
}
|
||||||
return baseUrl + value;
|
return baseUrl + value;
|
||||||
@ -71,7 +70,7 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
|
|||||||
public String getDescription() throws ParsingException {
|
public String getDescription() throws ParsingException {
|
||||||
try {
|
try {
|
||||||
return JsonUtils.getString(json, "description");
|
return JsonUtils.getString(json, "description");
|
||||||
}catch(ParsingException e) {
+} catch (ParsingException e) {
 return "No description";
 }
 }
@@ -86,12 +85,12 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
 JsonArray contents;
 try {
 contents = (JsonArray) JsonUtils.getValue(json, "data");
-}catch(Exception e) {
+} catch (Exception e) {
 throw new ParsingException("unable to extract channel streams", e);
 }

-for(Object c: contents) {
+for (Object c : contents) {
-if(c instanceof JsonObject) {
+if (c instanceof JsonObject) {
 final JsonObject item = (JsonObject) c;
 PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
 collector.commit(extractor);
@@ -110,7 +109,7 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
 public InfoItemsPage<StreamInfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
 Response response = getDownloader().get(pageUrl);
 JsonObject json = null;
-if(null != response && !StringUtil.isBlank(response.responseBody())) {
+if (null != response && !StringUtil.isBlank(response.responseBody())) {
 try {
 json = JsonParser.object().from(response.responseBody());
 } catch (Exception e) {
@@ -119,13 +118,13 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
 }

 StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
-if(json != null) {
+if (json != null) {
 PeertubeParsingHelper.validate(json);
 Number number = JsonUtils.getNumber(json, "total");
-if(number != null) this.total = number.longValue();
+if (number != null) this.total = number.longValue();
 collectStreamsFrom(collector, json, pageUrl);
 } else {
-throw new ExtractionException("Unable to get peertube kiosk info");
+throw new ExtractionException("Unable to get PeerTube kiosk info");
 }
 return new InfoItemsPage<>(collector, getNextPageUrl(pageUrl));
 }
@@ -138,7 +137,7 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
 } catch (RegexException e) {
 return "";
 }
-if(StringUtil.isBlank(prevStart)) return "";
+if (StringUtil.isBlank(prevStart)) return "";
 long nextStart = 0;
 try {
 nextStart = Long.valueOf(prevStart) + ITEMS_PER_PAGE;
@@ -146,9 +145,9 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
 return "";
 }

-if(nextStart >= total) {
+if (nextStart >= total) {
 return "";
-}else {
+} else {
 return prevPageUrl.replace(START_KEY + "=" + prevStart, START_KEY + "=" + String.valueOf(nextStart));
 }
 }
@@ -156,10 +155,10 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
 @Override
 public void onFetchPage(Downloader downloader) throws IOException, ExtractionException {
 Response response = downloader.get(getUrl());
-if(null != response && null != response.responseBody()) {
+if (null != response && null != response.responseBody()) {
 setInitialData(response.responseBody());
-}else {
+} else {
-throw new ExtractionException("Unable to extract peertube channel data");
+throw new ExtractionException("Unable to extract PeerTube channel data");
 }

 String pageUrl = getUrl() + "/videos?" + START_KEY + "=0&" + COUNT_KEY + "=" + ITEMS_PER_PAGE;
@@ -172,7 +171,7 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
 } catch (JsonParserException e) {
 throw new ExtractionException("Unable to extract peertube channel data", e);
 }
-if(null == json) throw new ExtractionException("Unable to extract peertube channel data");
+if (json == null) throw new ExtractionException("Unable to extract PeerTube channel data");
 }

 @Override

@@ -1,7 +1,8 @@
 package org.schabi.newpipe.extractor.services.peertube.extractors;

-import java.io.IOException;
+import com.grack.nanojson.JsonArray;
+import com.grack.nanojson.JsonObject;
+import com.grack.nanojson.JsonParser;
 import org.jsoup.helper.StringUtil;
 import org.schabi.newpipe.extractor.StreamingService;
 import org.schabi.newpipe.extractor.comments.CommentsExtractor;
@@ -16,9 +17,7 @@ import org.schabi.newpipe.extractor.utils.JsonUtils;
 import org.schabi.newpipe.extractor.utils.Parser;
 import org.schabi.newpipe.extractor.utils.Parser.RegexException;

-import com.grack.nanojson.JsonArray;
+import java.io.IOException;
-import com.grack.nanojson.JsonObject;
-import com.grack.nanojson.JsonParser;

 public class PeertubeCommentsExtractor extends CommentsExtractor {

@@ -49,12 +48,12 @@ public class PeertubeCommentsExtractor extends CommentsExtractor {
 JsonArray contents;
 try {
 contents = (JsonArray) JsonUtils.getValue(json, "data");
-}catch(Exception e) {
+} catch (Exception e) {
 throw new ParsingException("unable to extract comments info", e);
 }

-for(Object c: contents) {
+for (Object c : contents) {
-if(c instanceof JsonObject) {
+if (c instanceof JsonObject) {
 final JsonObject item = (JsonObject) c;
 PeertubeCommentsInfoItemExtractor extractor = new PeertubeCommentsInfoItemExtractor(item, this);
 collector.commit(extractor);
@@ -73,7 +72,7 @@ public class PeertubeCommentsExtractor extends CommentsExtractor {
 public InfoItemsPage<CommentsInfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
 Response response = getDownloader().get(pageUrl);
 JsonObject json = null;
-if(null != response && !StringUtil.isBlank(response.responseBody())) {
+if (null != response && !StringUtil.isBlank(response.responseBody())) {
 try {
 json = JsonParser.object().from(response.responseBody());
 } catch (Exception e) {
@@ -82,9 +81,9 @@ public class PeertubeCommentsExtractor extends CommentsExtractor {
 }

 CommentsInfoItemsCollector collector = new CommentsInfoItemsCollector(getServiceId());
-if(json != null) {
+if (json != null) {
 Number number = JsonUtils.getNumber(json, "total");
-if(number != null) this.total = number.longValue();
+if (number != null) this.total = number.longValue();
 collectStreamsFrom(collector, json, pageUrl);
 } else {
 throw new ExtractionException("Unable to get peertube comments info");
@@ -105,7 +104,7 @@ public class PeertubeCommentsExtractor extends CommentsExtractor {
 } catch (RegexException e) {
 return "";
 }
-if(StringUtil.isBlank(prevStart)) return "";
+if (StringUtil.isBlank(prevStart)) return "";
 long nextStart = 0;
 try {
 nextStart = Long.valueOf(prevStart) + ITEMS_PER_PAGE;
@@ -113,9 +112,9 @@ public class PeertubeCommentsExtractor extends CommentsExtractor {
 return "";
 }

-if(nextStart >= total) {
+if (nextStart >= total) {
 return "";
-}else {
+} else {
 return prevPageUrl.replace(START_KEY + "=" + prevStart, START_KEY + "=" + String.valueOf(nextStart));
 }
 }

@@ -1,5 +1,6 @@
 package org.schabi.newpipe.extractor.services.peertube.extractors;

+import com.grack.nanojson.JsonObject;
 import org.jsoup.Jsoup;
 import org.jsoup.nodes.Document;
 import org.schabi.newpipe.extractor.ServiceList;
@@ -9,8 +10,6 @@ import org.schabi.newpipe.extractor.localization.DateWrapper;
 import org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper;
 import org.schabi.newpipe.extractor.utils.JsonUtils;

-import com.grack.nanojson.JsonObject;

 public class PeertubeCommentsInfoItemExtractor implements CommentsInfoItemExtractor {

@@ -34,7 +33,7 @@ public class PeertubeCommentsInfoItemExtractor implements CommentsInfoItemExtrac
 String value;
 try {
 value = JsonUtils.getString(item, "account.avatar.path");
-}catch(Exception e) {
+} catch (Exception e) {
 value = "/client/assets/images/default-avatar.png";
 }
 return baseUrl + value;
@@ -67,7 +66,7 @@ public class PeertubeCommentsInfoItemExtractor implements CommentsInfoItemExtrac
 try {
 Document doc = Jsoup.parse(htmlText);
 return doc.body().text();
-} catch(Exception e) {
+} catch (Exception e) {
 return htmlText.replaceAll("(?s)<[^>]*>(\\s*<[^>]*>)*", "");
 }
 }
@@ -83,7 +82,7 @@ public class PeertubeCommentsInfoItemExtractor implements CommentsInfoItemExtrac
 String value;
 try {
 value = JsonUtils.getString(item, "account.avatar.path");
-} catch(Exception e) {
+} catch (Exception e) {
 value = "/client/assets/images/default-avatar.png";
 }
 return baseUrl + value;

@@ -1,7 +1,5 @@
 package org.schabi.newpipe.extractor.services.peertube.extractors;

-import java.io.IOException;

 import org.schabi.newpipe.extractor.StreamingService;
 import org.schabi.newpipe.extractor.downloader.Downloader;
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;
@@ -10,7 +8,9 @@ import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
 import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
 import org.schabi.newpipe.extractor.stream.StreamInfoItem;

-public class PeertubePlaylistExtractor extends PlaylistExtractor{
+import java.io.IOException;

+public class PeertubePlaylistExtractor extends PlaylistExtractor {

 public PeertubePlaylistExtractor(StreamingService service, ListLinkHandler linkHandler) {
 super(service, linkHandler);
@@ -73,7 +73,6 @@ public class PeertubePlaylistExtractor extends PlaylistExtractor{

 @Override
 public void onFetchPage(Downloader downloader) throws IOException, ExtractionException {
-// TODO Auto-generated method stub

 }


@@ -1,7 +1,8 @@
 package org.schabi.newpipe.extractor.services.peertube.extractors;

-import java.io.IOException;
+import com.grack.nanojson.JsonArray;
+import com.grack.nanojson.JsonObject;
+import com.grack.nanojson.JsonParser;
 import org.jsoup.helper.StringUtil;
 import org.schabi.newpipe.extractor.InfoItem;
 import org.schabi.newpipe.extractor.InfoItemExtractor;
@@ -18,9 +19,7 @@ import org.schabi.newpipe.extractor.utils.JsonUtils;
 import org.schabi.newpipe.extractor.utils.Parser;
 import org.schabi.newpipe.extractor.utils.Parser.RegexException;

-import com.grack.nanojson.JsonArray;
+import java.io.IOException;
-import com.grack.nanojson.JsonObject;
-import com.grack.nanojson.JsonParser;

 public class PeertubeSearchExtractor extends SearchExtractor {

@@ -54,13 +53,13 @@ public class PeertubeSearchExtractor extends SearchExtractor {
 JsonArray contents;
 try {
 contents = (JsonArray) JsonUtils.getValue(json, "data");
-}catch(Exception e) {
+} catch (Exception e) {
 throw new ParsingException("unable to extract search info", e);
 }

 String baseUrl = getBaseUrl();
-for(Object c: contents) {
+for (Object c : contents) {
-if(c instanceof JsonObject) {
+if (c instanceof JsonObject) {
 final JsonObject item = (JsonObject) c;
 PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
 collector.commit(extractor);
@@ -81,7 +80,7 @@ public class PeertubeSearchExtractor extends SearchExtractor {
 public InfoItemsPage<InfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
 Response response = getDownloader().get(pageUrl);
 JsonObject json = null;
-if(null != response && !StringUtil.isBlank(response.responseBody())) {
+if (null != response && !StringUtil.isBlank(response.responseBody())) {
 try {
 json = JsonParser.object().from(response.responseBody());
 } catch (Exception e) {
@@ -89,9 +88,9 @@ public class PeertubeSearchExtractor extends SearchExtractor {
 }
 }

-if(json != null) {
+if (json != null) {
 Number number = JsonUtils.getNumber(json, "total");
-if(number != null) this.total = number.longValue();
+if (number != null) this.total = number.longValue();
 return new InfoItemsPage<>(collectStreamsFrom(json), getNextPageUrl(pageUrl));
 } else {
 throw new ExtractionException("Unable to get peertube search info");
@@ -111,7 +110,7 @@ public class PeertubeSearchExtractor extends SearchExtractor {
 } catch (RegexException e) {
 return "";
 }
-if(StringUtil.isBlank(prevStart)) return "";
+if (StringUtil.isBlank(prevStart)) return "";
 long nextStart = 0;
 try {
 nextStart = Long.valueOf(prevStart) + ITEMS_PER_PAGE;
@@ -119,9 +118,9 @@ public class PeertubeSearchExtractor extends SearchExtractor {
 return "";
 }

-if(nextStart >= total) {
+if (nextStart >= total) {
 return "";
-}else {
+} else {
 return prevPageUrl.replace(START_KEY + "=" + prevStart, START_KEY + "=" + String.valueOf(nextStart));
 }
 }

@@ -1,13 +1,9 @@
 package org.schabi.newpipe.extractor.services.peertube.extractors;

-import java.io.IOException;
+import com.grack.nanojson.JsonArray;
-import java.io.UnsupportedEncodingException;
+import com.grack.nanojson.JsonObject;
-import java.net.URLEncoder;
+import com.grack.nanojson.JsonParser;
-import java.util.ArrayList;
+import com.grack.nanojson.JsonParserException;
-import java.util.Collections;
-import java.util.List;
-import java.util.Locale;

 import org.jsoup.helper.StringUtil;
 import org.schabi.newpipe.extractor.MediaFormat;
 import org.schabi.newpipe.extractor.NewPipe;
@@ -24,12 +20,14 @@ import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeSearch
 import org.schabi.newpipe.extractor.stream.*;
 import org.schabi.newpipe.extractor.utils.JsonUtils;

-import com.grack.nanojson.JsonArray;
-import com.grack.nanojson.JsonObject;
-import com.grack.nanojson.JsonParser;
-import com.grack.nanojson.JsonParserException;

 import javax.annotation.Nonnull;
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.net.URLEncoder;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Locale;

 public class PeertubeStreamExtractor extends StreamExtractor {


@@ -1,5 +1,6 @@
 package org.schabi.newpipe.extractor.services.peertube.extractors;

+import com.grack.nanojson.JsonObject;
 import org.schabi.newpipe.extractor.ServiceList;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.localization.DateWrapper;
@@ -8,8 +9,6 @@ import org.schabi.newpipe.extractor.stream.StreamInfoItemExtractor;
 import org.schabi.newpipe.extractor.stream.StreamType;
 import org.schabi.newpipe.extractor.utils.JsonUtils;

-import com.grack.nanojson.JsonObject;

 public class PeertubeStreamInfoItemExtractor implements StreamInfoItemExtractor {

 protected final JsonObject item;

@@ -1,10 +1,10 @@
 package org.schabi.newpipe.extractor.services.peertube.extractors;

-import java.util.List;

 import org.schabi.newpipe.extractor.StreamingService;
 import org.schabi.newpipe.extractor.subscription.SubscriptionExtractor;

+import java.util.List;

 public class PeertubeSubscriptionExtractor extends SubscriptionExtractor {

 public PeertubeSubscriptionExtractor(StreamingService service, List<ContentSource> supportedSources) {

@@ -1,14 +1,14 @@
 package org.schabi.newpipe.extractor.services.peertube.extractors;

-import java.io.IOException;
-import java.util.Collections;
-import java.util.List;

 import org.schabi.newpipe.extractor.StreamingService;
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;

-public class PeertubeSuggestionExtractor extends SuggestionExtractor{
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;

+public class PeertubeSuggestionExtractor extends SuggestionExtractor {

 public PeertubeSuggestionExtractor(StreamingService service) {
 super(service);

@@ -1,7 +1,8 @@
 package org.schabi.newpipe.extractor.services.peertube.extractors;

-import java.io.IOException;
+import com.grack.nanojson.JsonArray;
+import com.grack.nanojson.JsonObject;
+import com.grack.nanojson.JsonParser;
 import org.jsoup.helper.StringUtil;
 import org.schabi.newpipe.extractor.StreamingService;
 import org.schabi.newpipe.extractor.downloader.Downloader;
@@ -16,9 +17,7 @@ import org.schabi.newpipe.extractor.utils.JsonUtils;
 import org.schabi.newpipe.extractor.utils.Parser;
 import org.schabi.newpipe.extractor.utils.Parser.RegexException;

-import com.grack.nanojson.JsonArray;
+import java.io.IOException;
-import com.grack.nanojson.JsonObject;
-import com.grack.nanojson.JsonParser;

 public class PeertubeTrendingExtractor extends KioskExtractor<StreamInfoItem> {

@@ -49,13 +48,13 @@ public class PeertubeTrendingExtractor extends KioskExtractor<StreamInfoItem> {
 JsonArray contents;
 try {
 contents = (JsonArray) JsonUtils.getValue(json, "data");
-}catch(Exception e) {
+} catch (Exception e) {
-throw new ParsingException("unable to extract kiosk info", e);
+throw new ParsingException("Unable to extract kiosk info", e);
 }

 String baseUrl = getBaseUrl();
-for(Object c: contents) {
+for (Object c : contents) {
-if(c instanceof JsonObject) {
+if (c instanceof JsonObject) {
 final JsonObject item = (JsonObject) c;
 PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
 collector.commit(extractor);
@@ -74,7 +73,7 @@ public class PeertubeTrendingExtractor extends KioskExtractor<StreamInfoItem> {
 public InfoItemsPage<StreamInfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
 Response response = getDownloader().get(pageUrl);
 JsonObject json = null;
-if(null != response && !StringUtil.isBlank(response.responseBody())) {
+if (null != response && !StringUtil.isBlank(response.responseBody())) {
 try {
 json = JsonParser.object().from(response.responseBody());
 } catch (Exception e) {
@@ -83,9 +82,9 @@ public class PeertubeTrendingExtractor extends KioskExtractor<StreamInfoItem> {
 }

 StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
-if(json != null) {
+if (json != null) {
 Number number = JsonUtils.getNumber(json, "total");
-if(number != null) this.total = number.longValue();
+if (number != null) this.total = number.longValue();
 collectStreamsFrom(collector, json, pageUrl);
 } else {
 throw new ExtractionException("Unable to get peertube kiosk info");
@@ -106,7 +105,7 @@ public class PeertubeTrendingExtractor extends KioskExtractor<StreamInfoItem> {
 } catch (RegexException e) {
 return "";
 }
-if(StringUtil.isBlank(prevStart)) return "";
+if (StringUtil.isBlank(prevStart)) return "";
 long nextStart = 0;
 try {
 nextStart = Long.valueOf(prevStart) + ITEMS_PER_PAGE;
@@ -114,9 +113,9 @@ public class PeertubeTrendingExtractor extends KioskExtractor<StreamInfoItem> {
 return "";
 }

-if(nextStart >= total) {
+if (nextStart >= total) {
 return "";
-}else {
+} else {
 return prevPageUrl.replace(START_KEY + "=" + prevStart, START_KEY + "=" + String.valueOf(nextStart));
 }
 }

@@ -1,12 +1,12 @@
 package org.schabi.newpipe.extractor.services.peertube.linkHandler;

-import java.util.List;

 import org.schabi.newpipe.extractor.ServiceList;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
 import org.schabi.newpipe.extractor.utils.Parser;

+import java.util.List;

 public class PeertubeChannelLinkHandlerFactory extends ListLinkHandlerFactory {

 private static final PeertubeChannelLinkHandlerFactory instance = new PeertubeChannelLinkHandlerFactory();

@@ -1,13 +1,13 @@
 package org.schabi.newpipe.extractor.services.peertube.linkHandler;

-import java.util.List;

 import org.schabi.newpipe.extractor.ServiceList;
 import org.schabi.newpipe.extractor.exceptions.FoundAdException;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
 import org.schabi.newpipe.extractor.utils.Parser;

+import java.util.List;

 public class PeertubeCommentsLinkHandlerFactory extends ListLinkHandlerFactory {

 private static final PeertubeCommentsLinkHandlerFactory instance = new PeertubeCommentsLinkHandlerFactory();

@@ -1,13 +1,13 @@
 package org.schabi.newpipe.extractor.services.peertube.linkHandler;


-import java.util.List;

 import org.schabi.newpipe.extractor.ServiceList;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
 import org.schabi.newpipe.extractor.utils.Parser;

+import java.util.List;

 public class PeertubePlaylistLinkHandlerFactory extends ListLinkHandlerFactory {

 private static final PeertubePlaylistLinkHandlerFactory instance = new PeertubePlaylistLinkHandlerFactory();

@@ -1,13 +1,13 @@
 package org.schabi.newpipe.extractor.services.peertube.linkHandler;

-import java.io.UnsupportedEncodingException;
-import java.net.URLEncoder;
-import java.util.List;

 import org.schabi.newpipe.extractor.ServiceList;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandlerFactory;

+import java.io.UnsupportedEncodingException;
+import java.net.URLEncoder;
+import java.util.List;

 public class PeertubeSearchQueryHandlerFactory extends SearchQueryHandlerFactory {

 public static final String CHARSET_UTF_8 = "UTF-8";
@@ -38,6 +38,6 @@ public class PeertubeSearchQueryHandlerFactory extends SearchQueryHandlerFactory

 @Override
 public String[] getAvailableContentFilter() {
-return new String[] { VIDEOS };
+return new String[]{VIDEOS};
 }
 }

@@ -1,14 +1,14 @@
 package org.schabi.newpipe.extractor.services.peertube.linkHandler;

+import org.schabi.newpipe.extractor.ServiceList;
+import org.schabi.newpipe.extractor.exceptions.ParsingException;
+import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;

 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

-import org.schabi.newpipe.extractor.ServiceList;
-import org.schabi.newpipe.extractor.exceptions.ParsingException;
-import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;

 public class PeertubeTrendingLinkHandlerFactory extends ListLinkHandlerFactory {


@@ -30,7 +30,7 @@ public class PeertubeTrendingLinkHandlerFactory extends ListLinkHandlerFactory {
 KIOSK_MAP = Collections.unmodifiableMap(map);

 Map<String, String> reverseMap = new HashMap<>();
-for(Map.Entry<String, String> entry : KIOSK_MAP.entrySet()){
+for (Map.Entry<String, String> entry : KIOSK_MAP.entrySet()) {
 reverseMap.put(entry.getValue(), entry.getKey());
 }
 REVERSE_KIOSK_MAP = Collections.unmodifiableMap(reverseMap);

@@ -4,9 +4,9 @@ import com.grack.nanojson.JsonArray;
 import com.grack.nanojson.JsonObject;
 import com.grack.nanojson.JsonParser;
 import com.grack.nanojson.JsonParserException;
-import org.schabi.newpipe.extractor.downloader.Downloader;
 import org.schabi.newpipe.extractor.StreamingService;
 import org.schabi.newpipe.extractor.channel.ChannelExtractor;
+import org.schabi.newpipe.extractor.downloader.Downloader;
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
@@ -86,7 +86,7 @@ public class SoundcloudChannelExtractor extends ChannelExtractor {
 @Nonnull
 @Override
 public InfoItemsPage<StreamInfoItem> getInitialPage() throws ExtractionException {
-if(streamInfoItemsCollector == null) {
+if (streamInfoItemsCollector == null) {
 computeNextPageAndGetStreams();
 }
 return new InfoItemsPage<>(streamInfoItemsCollector, getNextPageUrl());
@@ -94,7 +94,7 @@ public class SoundcloudChannelExtractor extends ChannelExtractor {

 @Override
 public String getNextPageUrl() throws ExtractionException {
-if(nextPageUrl == null) {
+if (nextPageUrl == null) {
 computeNextPageAndGetStreams();
 }
 return nextPageUrl;

@@ -1,7 +1,7 @@
 package org.schabi.newpipe.extractor.services.soundcloud;

-import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
+import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
 import org.schabi.newpipe.extractor.utils.Parser;
 import org.schabi.newpipe.extractor.utils.Utils;


@@ -1,7 +1,7 @@
 package org.schabi.newpipe.extractor.services.soundcloud;

-import org.schabi.newpipe.extractor.downloader.Downloader;
 import org.schabi.newpipe.extractor.StreamingService;
+import org.schabi.newpipe.extractor.downloader.Downloader;
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.kiosk.KioskExtractor;
 import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
@@ -68,7 +68,7 @@ public class SoundcloudChartsExtractor extends KioskExtractor<StreamInfoItem> {

 @Override
 public String getNextPageUrl() throws IOException, ExtractionException {
-if(nextPageUrl == null) {
+if (nextPageUrl == null) {
 computNextPageAndStreams();
 }
 return nextPageUrl;
@@ -77,7 +77,7 @@ public class SoundcloudChartsExtractor extends KioskExtractor<StreamInfoItem> {
 @Nonnull
 @Override
 public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
-if(collector == null) {
+if (collector == null) {
 computNextPageAndStreams();
 }
 return new InfoItemsPage<>(collector, getNextPageUrl());

@@ -103,7 +103,7 @@ public class SoundcloudParsingHelper {

 /**
 * Call the endpoint "/resolve" of the api.<p>
-*
+* <p>
 * See https://developers.soundcloud.com/docs/api/reference#resolve
 */
 public static JsonObject resolveFor(Downloader downloader, String url) throws IOException, ExtractionException {

@@ -3,8 +3,8 @@ package org.schabi.newpipe.extractor.services.soundcloud;
 import com.grack.nanojson.JsonObject;
 import com.grack.nanojson.JsonParser;
 import com.grack.nanojson.JsonParserException;
-import org.schabi.newpipe.extractor.downloader.Downloader;
 import org.schabi.newpipe.extractor.StreamingService;
+import org.schabi.newpipe.extractor.downloader.Downloader;
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;

@@ -1,7 +1,7 @@
 package org.schabi.newpipe.extractor.services.soundcloud;

-import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
+import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
 import org.schabi.newpipe.extractor.utils.Parser;
 import org.schabi.newpipe.extractor.utils.Utils;


@@ -4,7 +4,10 @@ import com.grack.nanojson.JsonArray;
 import com.grack.nanojson.JsonObject;
 import com.grack.nanojson.JsonParser;
 import com.grack.nanojson.JsonParserException;
-import org.schabi.newpipe.extractor.*;
+import org.schabi.newpipe.extractor.InfoItem;
+import org.schabi.newpipe.extractor.InfoItemExtractor;
+import org.schabi.newpipe.extractor.InfoItemsCollector;
+import org.schabi.newpipe.extractor.StreamingService;
 import org.schabi.newpipe.extractor.downloader.Downloader;
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;

@@ -25,7 +25,7 @@ public class SoundcloudSearchQueryHandlerFactory extends SearchQueryHandlerFacto
 try {
 String url = "https://api-v2.soundcloud.com/search";

-if(contentFilter.size() > 0) {
+if (contentFilter.size() > 0) {
 switch (contentFilter.get(0)) {
 case TRACKS:
 url += "/tracks";
@@ -58,7 +58,7 @@ public class SoundcloudSearchQueryHandlerFactory extends SearchQueryHandlerFacto

 @Override
 public String[] getAvailableContentFilter() {
-return new String[] {
+return new String[]{
 ALL,
 TRACKS,
 USERS,

@@ -1,25 +1,20 @@
 package org.schabi.newpipe.extractor.services.soundcloud;

-import static java.util.Collections.singletonList;
-import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.AUDIO;

 import org.schabi.newpipe.extractor.StreamingService;
 import org.schabi.newpipe.extractor.channel.ChannelExtractor;
 import org.schabi.newpipe.extractor.comments.CommentsExtractor;
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.kiosk.KioskExtractor;
 import org.schabi.newpipe.extractor.kiosk.KioskList;
-import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
+import org.schabi.newpipe.extractor.linkhandler.*;
-import org.schabi.newpipe.extractor.linkhandler.LinkHandlerFactory;
-import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
-import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
-import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
-import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandlerFactory;
 import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
 import org.schabi.newpipe.extractor.search.SearchExtractor;
 import org.schabi.newpipe.extractor.stream.StreamExtractor;
 import org.schabi.newpipe.extractor.subscription.SubscriptionExtractor;

+import static java.util.Collections.singletonList;
+import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.AUDIO;

 public class SoundcloudService extends StreamingService {

 public SoundcloudService(int id) {

@@ -4,7 +4,9 @@ import com.grack.nanojson.JsonArray;
 import com.grack.nanojson.JsonObject;
 import com.grack.nanojson.JsonParser;
 import com.grack.nanojson.JsonParserException;
-import org.schabi.newpipe.extractor.*;
+import org.schabi.newpipe.extractor.MediaFormat;
+import org.schabi.newpipe.extractor.NewPipe;
+import org.schabi.newpipe.extractor.StreamingService;
 import org.schabi.newpipe.extractor.downloader.Downloader;
 import org.schabi.newpipe.extractor.exceptions.ContentNotAvailableException;
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;

@@ -1,7 +1,7 @@
 package org.schabi.newpipe.extractor.services.soundcloud;

-import org.schabi.newpipe.extractor.linkhandler.LinkHandlerFactory;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
+import org.schabi.newpipe.extractor.linkhandler.LinkHandlerFactory;
 import org.schabi.newpipe.extractor.utils.Parser;
 import org.schabi.newpipe.extractor.utils.Utils;


@@ -4,9 +4,9 @@ import com.grack.nanojson.JsonArray;
 import com.grack.nanojson.JsonObject;
 import com.grack.nanojson.JsonParser;
 import com.grack.nanojson.JsonParserException;
-import org.schabi.newpipe.extractor.downloader.Downloader;
 import org.schabi.newpipe.extractor.NewPipe;
 import org.schabi.newpipe.extractor.StreamingService;
+import org.schabi.newpipe.extractor.downloader.Downloader;
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;

@@ -1,13 +1,5 @@
 package org.schabi.newpipe.extractor.services.youtube;

-import static java.util.Arrays.asList;
-import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.AUDIO;
-import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.COMMENTS;
-import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.LIVE;
-import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.VIDEO;

-import java.util.List;

 import org.schabi.newpipe.extractor.StreamingService;
 import org.schabi.newpipe.extractor.channel.ChannelExtractor;
 import org.schabi.newpipe.extractor.comments.CommentsExtractor;
@@ -15,28 +7,22 @@ import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.feed.FeedExtractor;
 import org.schabi.newpipe.extractor.kiosk.KioskExtractor;
 import org.schabi.newpipe.extractor.kiosk.KioskList;
-import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
+import org.schabi.newpipe.extractor.linkhandler.*;
-import org.schabi.newpipe.extractor.linkhandler.LinkHandlerFactory;
-import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
-import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
-import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
-import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandlerFactory;
 import org.schabi.newpipe.extractor.localization.ContentCountry;
 import org.schabi.newpipe.extractor.localization.Localization;
 import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
 import org.schabi.newpipe.extractor.search.SearchExtractor;
 import org.schabi.newpipe.extractor.services.youtube.extractors.*;
-import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeChannelLinkHandlerFactory;
+import org.schabi.newpipe.extractor.services.youtube.linkHandler.*;
-import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeCommentsLinkHandlerFactory;
-import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubePlaylistLinkHandlerFactory;
-import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeSearchQueryHandlerFactory;
-import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeStreamLinkHandlerFactory;
-import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeTrendingLinkHandlerFactory;
 import org.schabi.newpipe.extractor.stream.StreamExtractor;
 import org.schabi.newpipe.extractor.subscription.SubscriptionExtractor;
 import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;

 import javax.annotation.Nonnull;
+import java.util.List;

+import static java.util.Arrays.asList;
+import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.*;

 /*
 * Created by Christian Schabesberger on 23.08.15.
|
@ -3,7 +3,6 @@ package org.schabi.newpipe.extractor.services.youtube.extractors;
|
|||||||
import com.grack.nanojson.JsonArray;
|
import com.grack.nanojson.JsonArray;
|
||||||
import com.grack.nanojson.JsonObject;
|
import com.grack.nanojson.JsonObject;
|
||||||
import com.grack.nanojson.JsonParser;
|
import com.grack.nanojson.JsonParser;
|
||||||
import org.schabi.newpipe.extractor.NewPipe;
|
|
||||||
import org.schabi.newpipe.extractor.StreamingService;
|
import org.schabi.newpipe.extractor.StreamingService;
|
||||||
import org.schabi.newpipe.extractor.comments.CommentsExtractor;
|
import org.schabi.newpipe.extractor.comments.CommentsExtractor;
|
||||||
import org.schabi.newpipe.extractor.comments.CommentsInfoItem;
|
import org.schabi.newpipe.extractor.comments.CommentsInfoItem;
|
||||||
@ -22,7 +21,9 @@ import javax.annotation.Nonnull;
|
|||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.io.UnsupportedEncodingException;
|
import java.io.UnsupportedEncodingException;
|
||||||
import java.net.URLEncoder;
|
import java.net.URLEncoder;
|
||||||
import java.util.*;
|
import java.util.HashMap;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
import java.util.regex.Pattern;
|
import java.util.regex.Pattern;
|
||||||
|
|
||||||
import static java.util.Collections.singletonList;
|
import static java.util.Collections.singletonList;
|
||||||
@ -65,7 +66,7 @@ public class YoutubeCommentsExtractor extends CommentsExtractor {
|
|||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
return "";
|
return "";
|
||||||
}
|
}
|
||||||
if(arr.isEmpty()) {
|
if (arr.isEmpty()) {
|
||||||
return "";
|
return "";
|
||||||
}
|
}
|
||||||
String continuation;
|
String continuation;
|
||||||
@ -111,7 +112,7 @@ public class YoutubeCommentsExtractor extends CommentsExtractor {
|
|||||||
JsonArray contents;
|
JsonArray contents;
|
||||||
try {
|
try {
|
||||||
contents = JsonUtils.getArray(ajaxJson, "response.continuationContents.commentSectionContinuation.items");
|
contents = JsonUtils.getArray(ajaxJson, "response.continuationContents.commentSectionContinuation.items");
|
||||||
}catch(Exception e) {
|
} catch (Exception e) {
|
||||||
//no comments
|
//no comments
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@ -119,12 +120,12 @@ public class YoutubeCommentsExtractor extends CommentsExtractor {
|
|||||||
List<Object> comments;
|
List<Object> comments;
|
||||||
try {
|
try {
|
||||||
comments = JsonUtils.getValues(contents, "commentThreadRenderer.comment.commentRenderer");
|
comments = JsonUtils.getValues(contents, "commentThreadRenderer.comment.commentRenderer");
|
||||||
}catch(Exception e) {
|
} catch (Exception e) {
|
||||||
throw new ParsingException("unable to get parse youtube comments", e);
|
throw new ParsingException("unable to get parse youtube comments", e);
|
||||||
}
|
}
|
||||||
|
|
||||||
for(Object c: comments) {
|
for (Object c : comments) {
|
||||||
if(c instanceof JsonObject) {
|
if (c instanceof JsonObject) {
|
||||||
CommentsInfoItemExtractor extractor = new YoutubeCommentsInfoItemExtractor((JsonObject) c, getUrl(), getTimeAgoParser());
|
CommentsInfoItemExtractor extractor = new YoutubeCommentsInfoItemExtractor((JsonObject) c, getUrl(), getTimeAgoParser());
|
||||||
collector.commit(extractor);
|
collector.commit(extractor);
|
||||||
}
|
}
|
||||||
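The hunks above read nested values with JsonUtils.getArray and JsonUtils.getValues, whose implementation is not part of this diff. A minimal sketch of the dotted-path idea, assuming nanojson's JsonObject (the helper below is illustrative, not the project's JsonUtils):

import com.grack.nanojson.JsonObject;

public final class JsonPathSketch {
    private JsonPathSketch() {
    }

    // Walks a nanojson object along a dotted path such as
    // "response.continuationContents.commentSectionContinuation.items".
    public static Object getValue(final JsonObject root, final String path) {
        Object current = root;
        for (final String key : path.split("\\.")) {
            if (!(current instanceof JsonObject)) {
                throw new IllegalStateException("No object available at key: " + key);
            }
            current = ((JsonObject) current).get(key);
        }
        return current;
    }
}

With such a helper, getValue(ajaxJson, "response.continuationContents.commentSectionContinuation.items") would yield the items array that the extractor then iterates.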
@ -132,7 +133,7 @@ public class YoutubeCommentsExtractor extends CommentsExtractor {
|
|||||||
}
|
}
|
||||||
|
|
||||||
private void fetchTitle(JsonArray contents) {
|
private void fetchTitle(JsonArray contents) {
|
||||||
if(null == title) {
|
if (title == null) {
|
||||||
try {
|
try {
|
||||||
title = getYoutubeText(JsonUtils.getObject(contents.getObject(0), "commentThreadRenderer.commentTargetTitle"));
|
title = getYoutubeText(JsonUtils.getObject(contents.getObject(0), "commentThreadRenderer.commentTargetTitle"));
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
@ -198,7 +199,7 @@ public class YoutubeCommentsExtractor extends CommentsExtractor {
|
|||||||
try {
|
try {
|
||||||
JsonArray arr = JsonUtils.getArray(object, "runs");
|
JsonArray arr = JsonUtils.getArray(object, "runs");
|
||||||
String result = "";
|
String result = "";
|
||||||
for(int i=0; i<arr.size();i++) {
|
for (int i = 0; i < arr.size(); i++) {
|
||||||
result = result + JsonUtils.getString(arr.getObject(i), "text");
|
result = result + JsonUtils.getString(arr.getObject(i), "text");
|
||||||
}
|
}
|
||||||
return result;
|
return result;
|
||||||
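The hunk above concatenates the "text" field of every object in a "runs" array through repeated string concatenation. A sketch of the same idea with a StringBuilder (class and method names here are illustrative):

import com.grack.nanojson.JsonArray;

public final class RunsTextSketch {
    private RunsTextSketch() {
    }

    // Joins the "text" entries of a YouTube "runs" array into a single string.
    public static String joinRuns(final JsonArray runs) {
        final StringBuilder result = new StringBuilder();
        for (int i = 0; i < runs.size(); i++) {
            result.append(runs.getObject(i).getString("text", ""));
        }
        return result.toString();
    }
}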
|
@ -4,8 +4,8 @@ import com.grack.nanojson.JsonArray;
|
|||||||
import com.grack.nanojson.JsonObject;
|
import com.grack.nanojson.JsonObject;
|
||||||
import org.schabi.newpipe.extractor.comments.CommentsInfoItemExtractor;
|
import org.schabi.newpipe.extractor.comments.CommentsInfoItemExtractor;
|
||||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||||
import org.schabi.newpipe.extractor.localization.TimeAgoParser;
|
|
||||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||||
|
import org.schabi.newpipe.extractor.localization.TimeAgoParser;
|
||||||
import org.schabi.newpipe.extractor.utils.JsonUtils;
|
import org.schabi.newpipe.extractor.utils.JsonUtils;
|
||||||
import org.schabi.newpipe.extractor.utils.Utils;
|
import org.schabi.newpipe.extractor.utils.Utils;
|
||||||
|
|
||||||
|
@ -68,7 +68,7 @@ public class YoutubePlaylistExtractor extends PlaylistExtractor {
|
|||||||
@Override
|
@Override
|
||||||
public String getBannerUrl() {
|
public String getBannerUrl() {
|
||||||
return ""; // Banner can't be handled by frontend right now.
|
return ""; // Banner can't be handled by frontend right now.
|
||||||
// Whoever is willing to implement this should also implement this in the fornt end
|
// Whoever is willing to implement this should also implement it in the frontend.
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@ -157,7 +157,7 @@ public class YoutubePlaylistExtractor extends PlaylistExtractor {
|
|||||||
}
|
}
|
||||||
|
|
||||||
private String getNextPageUrlFromAjax(final JsonObject pageJson, final String pageUrl)
|
private String getNextPageUrlFromAjax(final JsonObject pageJson, final String pageUrl)
|
||||||
throws ParsingException{
|
throws ParsingException {
|
||||||
String nextPageHtml = pageJson.getString("load_more_widget_html");
|
String nextPageHtml = pageJson.getString("load_more_widget_html");
|
||||||
if (!nextPageHtml.isEmpty()) {
|
if (!nextPageHtml.isEmpty()) {
|
||||||
return getNextPageUrlFrom(Jsoup.parse(nextPageHtml, pageUrl));
|
return getNextPageUrlFrom(Jsoup.parse(nextPageHtml, pageUrl));
|
||||||
@ -191,7 +191,7 @@ public class YoutubePlaylistExtractor extends PlaylistExtractor {
|
|||||||
final TimeAgoParser timeAgoParser = getTimeAgoParser();
|
final TimeAgoParser timeAgoParser = getTimeAgoParser();
|
||||||
|
|
||||||
for (final Element li : element.children()) {
|
for (final Element li : element.children()) {
|
||||||
if(isDeletedItem(li)) {
|
if (isDeletedItem(li)) {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -242,7 +242,7 @@ public class YoutubePlaylistExtractor extends PlaylistExtractor {
|
|||||||
|
|
||||||
private Element getUploaderLink() {
|
private Element getUploaderLink() {
|
||||||
// should always be present since we filter deleted items
|
// should always be present since we filter deleted items
|
||||||
if(uploaderLink == null) {
|
if (uploaderLink == null) {
|
||||||
uploaderLink = li.select("div[class=pl-video-owner] a").first();
|
uploaderLink = li.select("div[class=pl-video-owner] a").first();
|
||||||
}
|
}
|
||||||
return uploaderLink;
|
return uploaderLink;
|
||||||
@ -284,6 +284,7 @@ public class YoutubePlaylistExtractor extends PlaylistExtractor {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Check if the playlist item is deleted
|
* Check if the playlist item is deleted
|
||||||
|
*
|
||||||
* @param li the list item
|
* @param li the list item
|
||||||
* @return true if the item is deleted
|
* @return true if the item is deleted
|
||||||
*/
|
*/
|
||||||
|
@ -53,7 +53,7 @@ public class YoutubePlaylistInfoItemExtractor implements PlaylistInfoItemExtract
|
|||||||
.select("ul[class=\"yt-lockup-meta-info\"]")
|
.select("ul[class=\"yt-lockup-meta-info\"]")
|
||||||
.select("li").select("a").first();
|
.select("li").select("a").first();
|
||||||
|
|
||||||
if(a != null) {
|
if (a != null) {
|
||||||
return a.attr("abs:href");
|
return a.attr("abs:href");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -114,7 +114,8 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||||||
String name = null;
|
String name = null;
|
||||||
try {
|
try {
|
||||||
name = doc.select("meta[name=title]").attr(CONTENT);
|
name = doc.select("meta[name=title]").attr(CONTENT);
|
||||||
} catch (Exception ignored) {}
|
} catch (Exception ignored) {
|
||||||
|
}
|
||||||
|
|
||||||
if (name == null) {
|
if (name == null) {
|
||||||
throw new ParsingException("Could not get name", e);
|
throw new ParsingException("Could not get name", e);
|
||||||
@ -135,7 +136,8 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||||||
String uploadDate = null;
|
String uploadDate = null;
|
||||||
try {
|
try {
|
||||||
uploadDate = doc.select("meta[itemprop=datePublished]").attr(CONTENT);
|
uploadDate = doc.select("meta[itemprop=datePublished]").attr(CONTENT);
|
||||||
} catch (Exception ignored) {}
|
} catch (Exception ignored) {
|
||||||
|
}
|
||||||
|
|
||||||
if (uploadDate == null) {
|
if (uploadDate == null) {
|
||||||
throw new ParsingException("Could not get upload date", e);
|
throw new ParsingException("Could not get upload date", e);
|
||||||
@ -217,7 +219,7 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||||||
private String parseHtmlAndGetFullLinks(String descriptionHtml)
|
private String parseHtmlAndGetFullLinks(String descriptionHtml)
|
||||||
throws MalformedURLException, UnsupportedEncodingException, ParsingException {
|
throws MalformedURLException, UnsupportedEncodingException, ParsingException {
|
||||||
final Document description = Jsoup.parse(descriptionHtml, getUrl());
|
final Document description = Jsoup.parse(descriptionHtml, getUrl());
|
||||||
for(Element a : description.select("a")) {
|
for (Element a : description.select("a")) {
|
||||||
final String rawUrl = a.attr("abs:href");
|
final String rawUrl = a.attr("abs:href");
|
||||||
final URL redirectLink = new URL(rawUrl);
|
final URL redirectLink = new URL(rawUrl);
|
||||||
|
|
||||||
@ -242,22 +244,22 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||||||
// getUrl() is https://www.youtube.com/watch?v=..., never youtu.be, never &t=.
|
// getUrl() is https://www.youtube.com/watch?v=..., never youtu.be, never &t=.
|
||||||
a.attr("href", getUrl() + setTimestamp);
|
a.attr("href", getUrl() + setTimestamp);
|
||||||
|
|
||||||
} else if((queryString = redirectLink.getQuery()) != null) {
|
} else if ((queryString = redirectLink.getQuery()) != null) {
|
||||||
// if the query string is null we are not dealing with a redirect link,
|
// if the query string is null we are not dealing with a redirect link,
|
||||||
// so we don't need to override it.
|
// so we don't need to override it.
|
||||||
final String link =
|
final String link =
|
||||||
Parser.compatParseMap(queryString).get("q");
|
Parser.compatParseMap(queryString).get("q");
|
||||||
|
|
||||||
if(link != null) {
|
if (link != null) {
|
||||||
// if link is null the a tag is a hashtag.
|
// if link is null the a tag is a hashtag.
|
||||||
// They refer to the youtube search. We do not handle them.
|
// They refer to the youtube search. We do not handle them.
|
||||||
a.text(link);
|
a.text(link);
|
||||||
a.attr("href", link);
|
a.attr("href", link);
|
||||||
} else if(redirectLink.toString().contains("https://www.youtube.com/")) {
|
} else if (redirectLink.toString().contains("https://www.youtube.com/")) {
|
||||||
a.text(redirectLink.toString());
|
a.text(redirectLink.toString());
|
||||||
a.attr("href", redirectLink.toString());
|
a.attr("href", redirectLink.toString());
|
||||||
}
|
}
|
||||||
} else if(redirectLink.toString().contains("https://www.youtube.com/")) {
|
} else if (redirectLink.toString().contains("https://www.youtube.com/")) {
|
||||||
descriptionHtml = descriptionHtml.replace(rawUrl, redirectLink.toString());
|
descriptionHtml = descriptionHtml.replace(rawUrl, redirectLink.toString());
|
||||||
a.text(redirectLink.toString());
|
a.text(redirectLink.toString());
|
||||||
a.attr("href", redirectLink.toString());
|
a.attr("href", redirectLink.toString());
|
||||||
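The hunk above rewrites every anchor of the parsed description: timestamp links point back at the watch URL, redirect links are replaced by the target found in their "q" query parameter, and plain youtube links are kept verbatim. A self-contained sketch of the redirect case, using plain JDK query parsing instead of Parser.compatParseMap (names below are illustrative):

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;

import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLDecoder;

public final class DescriptionLinkSketch {
    private DescriptionLinkSketch() {
    }

    public static String unwrapRedirects(final String descriptionHtml, final String baseUrl)
            throws MalformedURLException, UnsupportedEncodingException {
        final Document description = Jsoup.parse(descriptionHtml, baseUrl);
        for (final Element a : description.select("a")) {
            final URL redirectLink = new URL(a.attr("abs:href"));
            final String query = redirectLink.getQuery();
            if (query == null) continue; // not a redirect link, nothing to rewrite
            for (final String pair : query.split("&")) {
                final String[] kv = pair.split("=", 2);
                if (kv.length == 2 && kv[0].equals("q")) {
                    final String target = URLDecoder.decode(kv[1], "UTF-8");
                    a.text(target);        // show the real destination as the link text
                    a.attr("href", target); // and point the anchor straight at it
                }
            }
        }
        return description.body().html();
    }
}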
|
@ -3,8 +3,8 @@ package org.schabi.newpipe.extractor.services.youtube.extractors;
|
|||||||
import org.jsoup.nodes.Element;
|
import org.jsoup.nodes.Element;
|
||||||
import org.jsoup.select.Elements;
|
import org.jsoup.select.Elements;
|
||||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||||
import org.schabi.newpipe.extractor.localization.TimeAgoParser;
|
|
||||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||||
|
import org.schabi.newpipe.extractor.localization.TimeAgoParser;
|
||||||
import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeParsingHelper;
|
import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeParsingHelper;
|
||||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemExtractor;
|
import org.schabi.newpipe.extractor.stream.StreamInfoItemExtractor;
|
||||||
import org.schabi.newpipe.extractor.stream.StreamType;
|
import org.schabi.newpipe.extractor.stream.StreamType;
|
||||||
@ -42,6 +42,7 @@ public class YoutubeStreamInfoItemExtractor implements StreamInfoItemExtractor {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Creates an extractor of StreamInfoItems from a YouTube page.
|
* Creates an extractor of StreamInfoItems from a YouTube page.
|
||||||
|
*
|
||||||
* @param item The page element
|
* @param item The page element
|
||||||
* @param timeAgoParser A parser of the textual dates or {@code null}.
|
* @param timeAgoParser A parser of the textual dates or {@code null}.
|
||||||
*/
|
*/
|
||||||
@ -68,10 +69,10 @@ public class YoutubeStreamInfoItemExtractor implements StreamInfoItemExtractor {
|
|||||||
|
|
||||||
private boolean isPremiumVideo() {
|
private boolean isPremiumVideo() {
|
||||||
Element premiumSpan = item.select("span[class=\"standalone-collection-badge-renderer-red-text\"]").first();
|
Element premiumSpan = item.select("span[class=\"standalone-collection-badge-renderer-red-text\"]").first();
|
||||||
if(premiumSpan == null) return false;
|
if (premiumSpan == null) return false;
|
||||||
|
|
||||||
// if this span has text it most likely says ("Free Video") so we can play this
|
// if this span has text it most likely says ("Free Video") so we can play this
|
||||||
if(premiumSpan.hasText()) return false;
|
if (premiumSpan.hasText()) return false;
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -233,7 +234,7 @@ public class YoutubeStreamInfoItemExtractor implements StreamInfoItemExtractor {
|
|||||||
return Long.parseLong(Utils.removeNonDigitCharacters(input));
|
return Long.parseLong(Utils.removeNonDigitCharacters(input));
|
||||||
} catch (NumberFormatException e) {
|
} catch (NumberFormatException e) {
|
||||||
// if this happens the video probably has no views
|
// if this happens the video probably has no views
|
||||||
if (!input.isEmpty()){
|
if (!input.isEmpty()) {
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
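The hunk above parses a view count by stripping non-digit characters and falling back when nothing numeric is left (typically live streams). A sketch of that pattern; removeNonDigitCharacters is approximated here and the -1 fallback for a fully empty input is an assumption for the example:

public final class ViewCountSketch {
    private ViewCountSketch() {
    }

    static String removeNonDigitCharacters(final String input) {
        return input.replaceAll("\\D+", "");
    }

    public static long parseViewCount(final String input) {
        try {
            return Long.parseLong(removeNonDigitCharacters(input));
        } catch (final NumberFormatException e) {
            // thrown when no digits are left, e.g. for "LIVE"
            return input.isEmpty() ? -1 : 0;
        }
    }

    public static void main(final String[] args) {
        System.out.println(parseViewCount("1,234,567 views")); // 1234567
        System.out.println(parseViewCount("LIVE"));            // 0
    }
}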
|
@ -3,9 +3,9 @@ package org.schabi.newpipe.extractor.services.youtube.extractors;
|
|||||||
import com.grack.nanojson.JsonArray;
|
import com.grack.nanojson.JsonArray;
|
||||||
import com.grack.nanojson.JsonParser;
|
import com.grack.nanojson.JsonParser;
|
||||||
import com.grack.nanojson.JsonParserException;
|
import com.grack.nanojson.JsonParserException;
|
||||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
|
||||||
import org.schabi.newpipe.extractor.NewPipe;
|
import org.schabi.newpipe.extractor.NewPipe;
|
||||||
import org.schabi.newpipe.extractor.StreamingService;
|
import org.schabi.newpipe.extractor.StreamingService;
|
||||||
|
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||||
import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;
|
import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;
|
||||||
@ -57,12 +57,12 @@ public class YoutubeSuggestionExtractor extends SuggestionExtractor {
|
|||||||
|
|
||||||
String response = dl.get(url, getExtractorLocalization()).responseBody();
|
String response = dl.get(url, getExtractorLocalization()).responseBody();
|
||||||
// trim JSONP part "JP(...)"
|
// trim JSONP part "JP(...)"
|
||||||
response = response.substring(3, response.length()-1);
|
response = response.substring(3, response.length() - 1);
|
||||||
try {
|
try {
|
||||||
JsonArray collection = JsonParser.array().from(response).getArray(1, new JsonArray());
|
JsonArray collection = JsonParser.array().from(response).getArray(1, new JsonArray());
|
||||||
for (Object suggestion : collection) {
|
for (Object suggestion : collection) {
|
||||||
if (!(suggestion instanceof JsonArray)) continue;
|
if (!(suggestion instanceof JsonArray)) continue;
|
||||||
String suggestionStr = ((JsonArray)suggestion).getString(0);
|
String suggestionStr = ((JsonArray) suggestion).getString(0);
|
||||||
if (suggestionStr == null) continue;
|
if (suggestionStr == null) continue;
|
||||||
suggestions.add(suggestionStr);
|
suggestions.add(suggestionStr);
|
||||||
}
|
}
|
||||||
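The hunk above strips the "JP(" … ")" JSONP wrapper before handing the payload to nanojson and then collects the first element of every inner array as a suggestion. A compact sketch of that parsing step (the download and the enclosing extractor class are omitted):

import com.grack.nanojson.JsonArray;
import com.grack.nanojson.JsonParser;
import com.grack.nanojson.JsonParserException;

import java.util.ArrayList;
import java.util.List;

public final class SuggestionJsonpSketch {
    private SuggestionJsonpSketch() {
    }

    public static List<String> parseSuggestions(String response) throws JsonParserException {
        // "JP([...])" -> "[...]"
        response = response.substring(3, response.length() - 1);

        final List<String> suggestions = new ArrayList<>();
        final JsonArray collection = JsonParser.array().from(response).getArray(1, new JsonArray());
        for (final Object suggestion : collection) {
            if (!(suggestion instanceof JsonArray)) continue;
            final String suggestionStr = ((JsonArray) suggestion).getString(0);
            if (suggestionStr != null) suggestions.add(suggestionStr);
        }
        return suggestions;
    }
}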
|
@ -88,8 +88,8 @@ public class YoutubeTrendingExtractor extends KioskExtractor<StreamInfoItem> {
|
|||||||
|
|
||||||
final TimeAgoParser timeAgoParser = getTimeAgoParser();
|
final TimeAgoParser timeAgoParser = getTimeAgoParser();
|
||||||
|
|
||||||
for(Element ul : uls) {
|
for (Element ul : uls) {
|
||||||
for(final Element li : ul.children()) {
|
for (final Element li : ul.children()) {
|
||||||
final Element el = li.select("div[class*=\"yt-lockup-dismissable\"]").first();
|
final Element el = li.select("div[class*=\"yt-lockup-dismissable\"]").first();
|
||||||
collector.commit(new YoutubeStreamInfoItemExtractor(li, timeAgoParser) {
|
collector.commit(new YoutubeStreamInfoItemExtractor(li, timeAgoParser) {
|
||||||
@Override
|
@Override
|
||||||
|
@ -31,7 +31,7 @@ public class YoutubePlaylistLinkHandlerFactory extends ListLinkHandlerFactory {
|
|||||||
}
|
}
|
||||||
|
|
||||||
String path = urlObj.getPath();
|
String path = urlObj.getPath();
|
||||||
if (!path.equals("/watch" ) && !path.equals("/playlist")) {
|
if (!path.equals("/watch") && !path.equals("/playlist")) {
|
||||||
throw new ParsingException("the url given is neither a video nor a playlist URL");
|
throw new ParsingException("the url given is neither a video nor a playlist URL");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -26,7 +26,7 @@ public class YoutubeSearchQueryHandlerFactory extends SearchQueryHandlerFactory
|
|||||||
final String url = "https://www.youtube.com/results"
|
final String url = "https://www.youtube.com/results"
|
||||||
+ "?q=" + URLEncoder.encode(searchString, CHARSET_UTF_8);
|
+ "?q=" + URLEncoder.encode(searchString, CHARSET_UTF_8);
|
||||||
|
|
||||||
if(contentFilters.size() > 0) {
|
if (contentFilters.size() > 0) {
|
||||||
switch (contentFilters.get(0)) {
|
switch (contentFilters.get(0)) {
|
||||||
case VIDEOS: return url + "&sp=EgIQAVAU";
|
case VIDEOS: return url + "&sp=EgIQAVAU";
|
||||||
case CHANNELS: return url + "&sp=EgIQAlAU";
|
case CHANNELS: return url + "&sp=EgIQAlAU";
|
||||||
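The hunk above builds the search results URL by url-encoding the query and appending an "sp" filter parameter. A sketch under the assumption that the filter arrives as a plain string; the constants and charset handling of the real factory differ slightly:

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

public final class SearchUrlSketch {
    private SearchUrlSketch() {
    }

    public static String searchUrl(final String searchString, final String contentFilter)
            throws UnsupportedEncodingException {
        final String url = "https://www.youtube.com/results"
                + "?q=" + URLEncoder.encode(searchString, "UTF-8");
        switch (contentFilter) {
            case "videos":   return url + "&sp=EgIQAVAU";
            case "channels": return url + "&sp=EgIQAlAU";
            default:         return url; // no content filter, plain search
        }
    }
}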
@ -44,7 +44,7 @@ public class YoutubeSearchQueryHandlerFactory extends SearchQueryHandlerFactory
|
|||||||
|
|
||||||
@Override
|
@Override
|
||||||
public String[] getAvailableContentFilter() {
|
public String[] getAvailableContentFilter() {
|
||||||
return new String[] {
|
return new String[]{
|
||||||
ALL,
|
ALL,
|
||||||
VIDEOS,
|
VIDEOS,
|
||||||
CHANNELS,
|
CHANNELS,
|
||||||
|
@ -1,7 +1,5 @@
|
|||||||
package org.schabi.newpipe.extractor.stream;
|
package org.schabi.newpipe.extractor.stream;
|
||||||
|
|
||||||
import javax.annotation.Nullable;
|
|
||||||
import java.util.Collection;
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
|
||||||
public final class Frameset {
|
public final class Frameset {
|
||||||
|
@ -1,10 +1,10 @@
|
|||||||
package org.schabi.newpipe.extractor.stream;
|
package org.schabi.newpipe.extractor.stream;
|
||||||
|
|
||||||
|
import org.schabi.newpipe.extractor.MediaFormat;
|
||||||
|
|
||||||
import java.io.Serializable;
|
import java.io.Serializable;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
|
||||||
import org.schabi.newpipe.extractor.MediaFormat;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Creates a stream object from url, format and optional torrent url
|
* Creates a stream object from url, format and optional torrent url
|
||||||
*/
|
*/
|
||||||
|
@ -27,7 +27,6 @@ import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
|||||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||||
import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
|
import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
|
||||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||||
import org.schabi.newpipe.extractor.utils.JsonUtils;
|
|
||||||
import org.schabi.newpipe.extractor.utils.Parser;
|
import org.schabi.newpipe.extractor.utils.Parser;
|
||||||
|
|
||||||
import javax.annotation.Nonnull;
|
import javax.annotation.Nonnull;
|
||||||
@ -77,6 +76,7 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* This will return the url to the thumbnail of the stream. Try to return the medium resolution here.
|
* This will return the url to the thumbnail of the stream. Try to return the medium resolution here.
|
||||||
|
*
|
||||||
* @return The url of the thumbnail.
|
* @return The url of the thumbnail.
|
||||||
* @throws ParsingException
|
* @throws ParsingException
|
||||||
*/
|
*/
|
||||||
@ -85,6 +85,7 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* This is the stream description.
|
* This is the stream description.
|
||||||
|
*
|
||||||
* @return The description of the stream/video or Description.emptyDescription if the description is empty.
|
* @return The description of the stream/video or Description.emptyDescription if the description is empty.
|
||||||
* @throws ParsingException
|
* @throws ParsingException
|
||||||
*/
|
*/
|
||||||
@ -93,6 +94,7 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the age limit.
|
* Get the age limit.
|
||||||
|
*
|
||||||
* @return The age which limits the content or {@value NO_AGE_LIMIT} if there is no limit
|
* @return The age which limits the content or {@value NO_AGE_LIMIT} if there is no limit
|
||||||
* @throws ParsingException if an error occurs while parsing
|
* @throws ParsingException if an error occurs while parsing
|
||||||
*/
|
*/
|
||||||
@ -100,6 +102,7 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* This should return the length of a video in seconds.
|
* This should return the length of a video in seconds.
|
||||||
|
*
|
||||||
* @return The length of the stream in seconds.
|
* @return The length of the stream in seconds.
|
||||||
* @throws ParsingException
|
* @throws ParsingException
|
||||||
*/
|
*/
|
||||||
@ -109,6 +112,7 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
* If the url you are currently handling contains a time stamp/seek, you can return the
|
* If the url you are currently handling contains a time stamp/seek, you can return the
|
||||||
* position it represents here.
|
* position it represents here.
|
||||||
* If the url has no time stamp simply return zero.
|
* If the url has no time stamp simply return zero.
|
||||||
|
*
|
||||||
* @return the timestamp in seconds
|
* @return the timestamp in seconds
|
||||||
* @throws ParsingException
|
* @throws ParsingException
|
||||||
*/
|
*/
|
||||||
@ -117,22 +121,25 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
/**
|
/**
|
||||||
* The count of how many people have watched the video/listened to the audio stream.
|
* The count of how many people have watched the video/listened to the audio stream.
|
||||||
* If the current stream has no view count or it's not available simply return -1
|
* If the current stream has no view count or it's not available simply return -1
|
||||||
|
*
|
||||||
* @return amount of views.
|
* @return amount of views.
|
||||||
* @throws ParsingException
|
* @throws ParsingException
|
||||||
*/
|
*/
|
||||||
public abstract long getViewCount() throws ParsingException;
|
public abstract long getViewCount() throws ParsingException;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The Amount of likes a video/audio stream got.
|
* The amount of likes a video/audio stream got.
|
||||||
* If the current stream has no likes or it's not available simply return -1
|
* If the current stream has no likes or it's not available simply return -1
|
||||||
|
*
|
||||||
* @return the amount of likes the stream got
|
* @return the amount of likes the stream got
|
||||||
* @throws ParsingException
|
* @throws ParsingException
|
||||||
*/
|
*/
|
||||||
public abstract long getLikeCount() throws ParsingException;
|
public abstract long getLikeCount() throws ParsingException;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The Amount of dislikes a video/audio stream got.
|
* The amount of dislikes a video/audio stream got.
|
||||||
* If the current stream has no dislikes or it's not available simply return -1
|
* If the current stream has no dislikes or it's not available simply return -1
|
||||||
|
*
|
||||||
* @return the amount of dislikes the stream got
|
* @return the amount of dislikes the stream got
|
||||||
* @throws ParsingException
|
* @throws ParsingException
|
||||||
*/
|
*/
|
||||||
@ -144,6 +151,7 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
* <a href="https://teamnewpipe.github.io/documentation/03_Implement_a_service/#channel">ChannelExtractor</a>,
|
* <a href="https://teamnewpipe.github.io/documentation/03_Implement_a_service/#channel">ChannelExtractor</a>,
|
||||||
* so be sure to implement that one before you return a value here, otherwise NewPipe will crash if one selects
|
* so be sure to implement that one before you return a value here, otherwise NewPipe will crash if one selects
|
||||||
* this url.
|
* this url.
|
||||||
|
*
|
||||||
* @return the url to the page of the creator/uploader of the stream or an empty String
|
* @return the url to the page of the creator/uploader of the stream or an empty String
|
||||||
* @throws ParsingException
|
* @throws ParsingException
|
||||||
*/
|
*/
|
||||||
@ -153,6 +161,7 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
/**
|
/**
|
||||||
* The name of the creator/uploader of the stream.
|
* The name of the creator/uploader of the stream.
|
||||||
* If the name is not available you can simply return an empty string.
|
* If the name is not available you can simply return an empty string.
|
||||||
|
*
|
||||||
* @return the name of the creator/uploader of the stream or an empty String
|
* @return the name of the creator/uploader of the stream or an empty String
|
||||||
* @throws ParsingException
|
* @throws ParsingException
|
||||||
*/
|
*/
|
||||||
@ -162,6 +171,7 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
/**
|
/**
|
||||||
* The url to the image file/profile picture/avatar of the creator/uploader of the stream.
|
* The url to the image file/profile picture/avatar of the creator/uploader of the stream.
|
||||||
* If the url is not available you can return an empty String.
|
* If the url is not available you can return an empty String.
|
||||||
|
*
|
||||||
* @return The url of the image file of the uploader or an empty String
|
* @return The url of the image file of the uploader or an empty String
|
||||||
* @throws ParsingException
|
* @throws ParsingException
|
||||||
*/
|
*/
|
||||||
@ -171,20 +181,24 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
/**
|
/**
|
||||||
* Get the dash mpd url. If you don't know what a dash MPD is you can read about it
|
* Get the dash mpd url. If you don't know what a dash MPD is you can read about it
|
||||||
* <a href="https://www.brendanlong.com/the-structure-of-an-mpeg-dash-mpd.html">here</a>.
|
* <a href="https://www.brendanlong.com/the-structure-of-an-mpeg-dash-mpd.html">here</a>.
|
||||||
|
*
|
||||||
* @return the url as a string or an empty string
|
* @return the url as a string or an empty string
|
||||||
* @throws ParsingException if an error occurs while reading
|
* @throws ParsingException if an error occurs while reading
|
||||||
*/
|
*/
|
||||||
@Nonnull public abstract String getDashMpdUrl() throws ParsingException;
|
@Nonnull
|
||||||
|
public abstract String getDashMpdUrl() throws ParsingException;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* I am not sure if this is in use, and how this is used. However the frontend is missing support
|
* I am not sure if this is in use, and how this is used. However the frontend is missing support
|
||||||
* for HLS streams. Correct me if I am wrong. Please open an
|
* for HLS streams. Correct me if I am wrong. Please open an
|
||||||
* <a href="https://github.com/teamnewpipe/newpipe/issues">issue</a>,
|
* <a href="https://github.com/teamnewpipe/newpipe/issues">issue</a>,
|
||||||
* or fix this description if you know what's up with this.
|
* or fix this description if you know what's up with this.
|
||||||
|
*
|
||||||
* @return The Url to the hls stream.
|
* @return The Url to the hls stream.
|
||||||
* @throws ParsingException
|
* @throws ParsingException
|
||||||
*/
|
*/
|
||||||
@Nonnull public abstract String getHlsUrl() throws ParsingException;
|
@Nonnull
|
||||||
|
public abstract String getHlsUrl() throws ParsingException;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This should return a list of available
|
* This should return a list of available
|
||||||
@ -192,6 +206,7 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
* You can also return null or an empty list, however be aware that if you don't return anything
|
* You can also return null or an empty list, however be aware that if you don't return anything
|
||||||
* in getVideoStreams(), getVideoOnlyStreams() and getDashMpdUrl() either the Collector will handle this as
|
* in getVideoStreams(), getVideoOnlyStreams() and getDashMpdUrl() either the Collector will handle this as
|
||||||
* a failed extraction procedure.
|
* a failed extraction procedure.
|
||||||
|
*
|
||||||
* @return a list of audio only streams in the format of AudioStream
|
* @return a list of audio only streams in the format of AudioStream
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
* @throws ExtractionException
|
* @throws ExtractionException
|
||||||
@ -205,6 +220,7 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
* You can also return null or an empty list, however be aware that if you don't return anything
|
* You can also return null or an empty list, however be aware that if you don't return anything
|
||||||
* in getAudioStreams(), getVideoOnlyStreams() and getDashMpdUrl() either the Collector will handle this as
|
* in getAudioStreams(), getVideoOnlyStreams() and getDashMpdUrl() either the Collector will handle this as
|
||||||
* a failed extraction procedure.
|
* a failed extraction procedure.
|
||||||
|
*
|
||||||
* @return a list of combined video and streams in the format of AudioStream
|
* @return a list of combined video and streams in the format of AudioStream
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
* @throws ExtractionException
|
* @throws ExtractionException
|
||||||
@ -218,6 +234,7 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
* You can also return null or an empty list, however be aware that if you don't return anything
|
* You can also return null or an empty list, however be aware that if you don't return anything
|
||||||
* in getAudioStreams(), getVideoStreams() and getDashMpdUrl() either the Collector will handle this as
|
* in getAudioStreams(), getVideoStreams() and getDashMpdUrl() either the Collector will handle this as
|
||||||
* a failed extraction procedure.
|
* a failed extraction procedure.
|
||||||
|
*
|
||||||
* @return a list of video and streams in the format of AudioStream
|
* @return a list of video and streams in the format of AudioStream
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
* @throws ExtractionException
|
* @throws ExtractionException
|
||||||
@ -228,6 +245,7 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
* This will return a list of available
|
* This will return a list of available
|
||||||
* <a href="https://teamnewpipe.github.io/NewPipeExtractor/javadoc/org/schabi/newpipe/extractor/stream/Subtitles.html">Subtitles</a>s.
|
* <a href="https://teamnewpipe.github.io/NewPipeExtractor/javadoc/org/schabi/newpipe/extractor/stream/Subtitles.html">Subtitles</a>s.
|
||||||
* If no subtitles are available an empty list can be returned.
|
* If no subtitles are available an empty list can be returned.
|
||||||
|
*
|
||||||
* @return a list of available subtitles or an empty list
|
* @return a list of available subtitles or an empty list
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
* @throws ExtractionException
|
* @throws ExtractionException
|
||||||
@ -240,6 +258,7 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
* <a href="https://teamnewpipe.github.io/NewPipeExtractor/javadoc/org/schabi/newpipe/extractor/stream/Subtitles.html">Subtitles</a>s.
|
* <a href="https://teamnewpipe.github.io/NewPipeExtractor/javadoc/org/schabi/newpipe/extractor/stream/Subtitles.html">Subtitles</a>s.
|
||||||
* given by a specific type.
|
* given by a specific type.
|
||||||
* If no subtitles in that specific format are available an empty list can be returned.
|
* If no subtitles in that specific format are available an empty list can be returned.
|
||||||
|
*
|
||||||
* @param format the media format by which the subtitles should be filtered
|
* @param format the media format by which the subtitles should be filtered
|
||||||
* @return a list of available subtitles or an empty list
|
* @return a list of available subtitles or an empty list
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
@ -250,15 +269,17 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the <a href="https://teamnewpipe.github.io/NewPipeExtractor/javadoc/">StreamType</a>.
|
* Get the <a href="https://teamnewpipe.github.io/NewPipeExtractor/javadoc/">StreamType</a>.
|
||||||
|
*
|
||||||
* @return the type of the stream
|
* @return the type of the stream
|
||||||
* @throws ParsingException
|
* @throws ParsingException
|
||||||
*/
|
*/
|
||||||
public abstract StreamType getStreamType() throws ParsingException;
|
public abstract StreamType getStreamType() throws ParsingException;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* should return the url of the next stream. NewPipe will automatically play
|
* Should return the url of the next stream. NewPipe will automatically play
|
||||||
* the next stream if the user wants that.
|
* the next stream if the user wants that.
|
||||||
* If the next stream is not available simply return null
|
* If the next stream is not available simply return null
|
||||||
|
*
|
||||||
* @return the InfoItem of the next stream
|
* @return the InfoItem of the next stream
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
* @throws ExtractionException
|
* @throws ExtractionException
|
||||||
@ -270,7 +291,8 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
* streams. If you don't like suggested streams you should implement them anyway since they can
|
* streams. If you don't like suggested streams you should implement them anyway since they can
|
||||||
* be disabled by the user later in the frontend.
|
* be disabled by the user later in the frontend.
|
||||||
* This list MUST NOT contain the next available video as this should be return through getNextStream()
|
* This list MUST NOT contain the next available video as this should be return through getNextStream()
|
||||||
* If is is not available simply return null
|
* If it is not available simply return null
|
||||||
|
*
|
||||||
* @return a list of InfoItems showing the related videos/streams
|
* @return a list of InfoItems showing the related videos/streams
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
* @throws ExtractionException
|
* @throws ExtractionException
|
||||||
@ -279,6 +301,7 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Should return a list of Frameset objects that contain previews of stream frames
|
* Should return a list of Frameset objects that contain previews of stream frames
|
||||||
|
*
|
||||||
* @return list of preview frames or empty list if frames preview is not supported or not found for specified stream
|
* @return list of preview frames or empty list if frames preview is not supported or not found for specified stream
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
* @throws ExtractionException
|
* @throws ExtractionException
|
||||||
@ -301,9 +324,10 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Override this function if the format of the time stamp in the url is not the same format as that from YouTube.
|
* Override this function if the format of the time stamp in the url is not the same format as that from YouTube.
|
||||||
* Honestly I don't even know the time stamp fromat of youtube.
|
* Honestly I don't even know the time stamp format of YouTube.
|
||||||
|
*
|
||||||
* @param regexPattern
|
* @param regexPattern
|
||||||
* @return the sime stamp/seek for the video in seconds
|
* @return the time stamp/seek for the video in seconds
|
||||||
* @throws ParsingException
|
* @throws ParsingException
|
||||||
*/
|
*/
|
||||||
protected long getTimestampSeconds(String regexPattern) throws ParsingException {
|
protected long getTimestampSeconds(String regexPattern) throws ParsingException {
|
||||||
@ -311,10 +335,10 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
try {
|
try {
|
||||||
timeStamp = Parser.matchGroup1(regexPattern, getOriginalUrl());
|
timeStamp = Parser.matchGroup1(regexPattern, getOriginalUrl());
|
||||||
} catch (Parser.RegexException e) {
|
} catch (Parser.RegexException e) {
|
||||||
// catch this instantly since an url does not necessarily have to have a time stamp
|
// catch this instantly since a url does not necessarily have a timestamp
|
||||||
|
|
||||||
// -2 because well the testing system will then know its the regex that failed :/
|
// -2 because the testing system will consequently know that the regex failed
|
||||||
// not good i know
|
// not good, I know
|
||||||
return -2;
|
return -2;
|
||||||
}
|
}
|
||||||
|
|
||||||
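getTimestampSeconds() above pulls the seek value out of the original URL with a regex and returns -2 when the pattern does not match. An illustrative, self-contained sketch of the timestamp idea; the regex and the sample URL below are assumptions for the example, not the extractor's own:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public final class TimestampSketch {
    // Matches "t=1h2m3s"-style values as well as plain "t=120"
    private static final Pattern TIMESTAMP =
            Pattern.compile("[?&]t=(?:(\\d+)h)?(?:(\\d+)m)?(\\d+)s?");

    private TimestampSketch() {
    }

    public static long timestampSeconds(final String url) {
        final Matcher m = TIMESTAMP.matcher(url);
        if (!m.find()) return 0; // no timestamp in the url
        final long hours = m.group(1) == null ? 0 : Long.parseLong(m.group(1));
        final long minutes = m.group(2) == null ? 0 : Long.parseLong(m.group(2));
        final long seconds = Long.parseLong(m.group(3));
        return hours * 3600 + minutes * 60 + seconds;
    }

    public static void main(final String[] args) {
        // prints 3723 for 1h2m3s
        System.out.println(timestampSeconds("https://www.youtube.com/watch?v=dQw4w9WgXcQ&t=1h2m3s"));
    }
}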
@ -357,6 +381,7 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
* If the host is not available, or if the service doesn't use
|
* If the host is not available, or if the service doesn't use
|
||||||
* a federated system, but a centralised system,
|
* a federated system, but a centralised system,
|
||||||
* you can simply return an empty string.
|
* you can simply return an empty string.
|
||||||
|
*
|
||||||
* @return the host of the stream or an empty String.
|
* @return the host of the stream or an empty String.
|
||||||
* @throws ParsingException
|
* @throws ParsingException
|
||||||
*/
|
*/
|
||||||
@ -366,6 +391,7 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
/**
|
/**
|
||||||
* The privacy of the stream (Eg. Public, Private, Unlisted…).
|
* The privacy of the stream (Eg. Public, Private, Unlisted…).
|
||||||
* If the privacy is not available you can simply return an empty string.
|
* If the privacy is not available you can simply return an empty string.
|
||||||
|
*
|
||||||
* @return the privacy of the stream or an empty String.
|
* @return the privacy of the stream or an empty String.
|
||||||
* @throws ParsingException
|
* @throws ParsingException
|
||||||
*/
|
*/
|
||||||
@ -375,6 +401,7 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
/**
|
/**
|
||||||
* The name of the category of the stream.
|
* The name of the category of the stream.
|
||||||
* If the category is not available you can simply return an empty string.
|
* If the category is not available you can simply return an empty string.
|
||||||
|
*
|
||||||
* @return the category of the stream or an empty String.
|
* @return the category of the stream or an empty String.
|
||||||
* @throws ParsingException
|
* @throws ParsingException
|
||||||
*/
|
*/
|
||||||
@ -384,6 +411,7 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
/**
|
/**
|
||||||
* The name of the licence of the stream.
|
* The name of the licence of the stream.
|
||||||
* If the licence is not available you can simply return an empty string.
|
* If the licence is not available you can simply return an empty string.
|
||||||
|
*
|
||||||
* @return the licence of the stream or an empty String.
|
* @return the licence of the stream or an empty String.
|
||||||
* @throws ParsingException
|
* @throws ParsingException
|
||||||
*/
|
*/
|
||||||
@ -395,6 +423,7 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
* If the language is not available you can simply return null.
|
* If the language is not available you can simply return null.
|
||||||
* If the language is provided by a language code, you can return
|
* If the language is provided by a language code, you can return
|
||||||
* new Locale(language_code);
|
* new Locale(language_code);
|
||||||
|
*
|
||||||
* @return the locale language of the stream or null.
|
* @return the locale language of the stream or null.
|
||||||
* @throws ParsingException
|
* @throws ParsingException
|
||||||
*/
|
*/
|
||||||
@ -404,6 +433,7 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
/**
|
/**
|
||||||
* The list of tags of the stream.
|
* The list of tags of the stream.
|
||||||
* If the tag list is not available you can simply return an empty list.
|
* If the tag list is not available you can simply return an empty list.
|
||||||
|
*
|
||||||
* @return the list of tags of the stream or an empty list.
|
* @return the list of tags of the stream or an empty list.
|
||||||
* @throws ParsingException
|
* @throws ParsingException
|
||||||
*/
|
*/
|
||||||
@ -416,6 +446,7 @@ public abstract class StreamExtractor extends Extractor {
|
|||||||
* (support button).
|
* (support button).
|
||||||
* If the support information is not available,
|
* If the support information is not available,
|
||||||
* you can simply return an empty String.
|
* you can simply return an empty String.
|
||||||
|
*
|
||||||
* @return the support information of the stream or an empty String.
|
* @return the support information of the stream or an empty String.
|
||||||
* @throws ParsingException
|
* @throws ParsingException
|
||||||
*/
|
*/
|
||||||
|
@ -31,6 +31,7 @@ public interface StreamInfoItemExtractor extends InfoItemExtractor {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the stream type
|
* Get the stream type
|
||||||
|
*
|
||||||
* @return the stream type
|
* @return the stream type
|
||||||
* @throws ParsingException thrown if there is an error in the extraction
|
* @throws ParsingException thrown if there is an error in the extraction
|
||||||
*/
|
*/
|
||||||
@ -38,6 +39,7 @@ public interface StreamInfoItemExtractor extends InfoItemExtractor {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Check if the stream is an ad.
|
* Check if the stream is an ad.
|
||||||
|
*
|
||||||
* @return {@code true} if the stream is an ad.
|
* @return {@code true} if the stream is an ad.
|
||||||
* @throws ParsingException thrown if there is an error in the extraction
|
* @throws ParsingException thrown if there is an error in the extraction
|
||||||
*/
|
*/
|
||||||
@ -45,6 +47,7 @@ public interface StreamInfoItemExtractor extends InfoItemExtractor {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the stream duration in seconds
|
* Get the stream duration in seconds
|
||||||
|
*
|
||||||
* @return the stream duration in seconds
|
* @return the stream duration in seconds
|
||||||
* @throws ParsingException thrown if there is an error in the extraction
|
* @throws ParsingException thrown if there is an error in the extraction
|
||||||
*/
|
*/
|
||||||
@ -52,6 +55,7 @@ public interface StreamInfoItemExtractor extends InfoItemExtractor {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Parses the number of views
|
* Parses the number of views
|
||||||
|
*
|
||||||
* @return the number of views or -1 for live streams
|
* @return the number of views or -1 for live streams
|
||||||
* @throws ParsingException thrown if there is an error in the extraction
|
* @throws ParsingException thrown if there is an error in the extraction
|
||||||
*/
|
*/
|
||||||
@ -59,6 +63,7 @@ public interface StreamInfoItemExtractor extends InfoItemExtractor {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the uploader name
|
* Get the uploader name
|
||||||
|
*
|
||||||
* @return the uploader name
|
* @return the uploader name
|
||||||
* @throws ParsingException if parsing fails
|
* @throws ParsingException if parsing fails
|
||||||
*/
|
*/
|
||||||
|
@ -101,8 +101,8 @@ public class StreamInfoItemsCollector extends InfoItemsCollector<StreamInfoItem,
|
|||||||
|
|
||||||
public List<StreamInfoItem> getStreamInfoItemList() {
|
public List<StreamInfoItem> getStreamInfoItemList() {
|
||||||
List<StreamInfoItem> siiList = new Vector<>();
|
List<StreamInfoItem> siiList = new Vector<>();
|
||||||
for(InfoItem ii : super.getItems()) {
|
for (InfoItem ii : super.getItems()) {
|
||||||
if(ii instanceof StreamInfoItem) {
|
if (ii instanceof StreamInfoItem) {
|
||||||
siiList.add((StreamInfoItem) ii);
|
siiList.add((StreamInfoItem) ii);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -52,7 +52,7 @@ public class SubtitlesStream extends Stream implements Serializable {
|
|||||||
|
|
||||||
@Override
|
@Override
|
||||||
public boolean equalStats(Stream cmp) {
|
public boolean equalStats(Stream cmp) {
|
||||||
return super.equalStats(cmp)&&
|
return super.equalStats(cmp) &&
|
||||||
cmp instanceof SubtitlesStream &&
|
cmp instanceof SubtitlesStream &&
|
||||||
code.equals(((SubtitlesStream) cmp).code) &&
|
code.equals(((SubtitlesStream) cmp).code) &&
|
||||||
autoGenerated == ((SubtitlesStream) cmp).autoGenerated;
|
autoGenerated == ((SubtitlesStream) cmp).autoGenerated;
|
||||||
|
@ -56,6 +56,7 @@ public class VideoStream extends Stream {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the video resolution
|
* Get the video resolution
|
||||||
|
*
|
||||||
* @return the video resolution
|
* @return the video resolution
|
||||||
*/
|
*/
|
||||||
public String getResolution() {
|
public String getResolution() {
|
||||||
@ -64,8 +65,9 @@ public class VideoStream extends Stream {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Check if the video is video only.
|
* Check if the video is video only.
|
||||||
*
|
* <p>
|
||||||
* Video only streams have no audio
|
* Video only streams have no audio
|
||||||
|
*
|
||||||
* @return {@code true} if this stream is vid
|
* @return {@code true} if this stream is vid
|
||||||
*/
|
*/
|
||||||
public boolean isVideoOnly() {
|
public boolean isVideoOnly() {
|
||||||
|
@ -1,8 +1,8 @@
|
|||||||
package org.schabi.newpipe.extractor.utils;
|
package org.schabi.newpipe.extractor.utils;
|
||||||
|
|
||||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
|
||||||
import org.schabi.newpipe.extractor.MediaFormat;
|
import org.schabi.newpipe.extractor.MediaFormat;
|
||||||
import org.schabi.newpipe.extractor.NewPipe;
|
import org.schabi.newpipe.extractor.NewPipe;
|
||||||
|
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||||
import org.schabi.newpipe.extractor.exceptions.ReCaptchaException;
|
import org.schabi.newpipe.extractor.exceptions.ReCaptchaException;
|
||||||
import org.schabi.newpipe.extractor.services.youtube.ItagItem;
|
import org.schabi.newpipe.extractor.services.youtube.ItagItem;
|
||||||
@ -109,11 +109,11 @@ public class DashMpdParser {
|
|||||||
* <p>
|
* <p>
|
||||||
* It has video, video only and audio streams and will only add to the list if it doesn't
|
* It has video, video only and audio streams and will only add to the list if it doesn't
|
||||||
* find a similar stream in the respective lists (calling {@link Stream#equalStats}).
|
* find a similar stream in the respective lists (calling {@link Stream#equalStats}).
|
||||||
*
|
* <p>
|
||||||
* Info about dash MPD can be found here
|
* Info about dash MPD can be found here
|
||||||
* @see <a href="https://www.brendanlong.com/the-structure-of-an-mpeg-dash-mpd.html">www.brendanlong.com</a>
|
|
||||||
*
|
*
|
||||||
* @param streamInfo where the parsed streams will be added
|
* @param streamInfo where the parsed streams will be added
|
||||||
|
* @see <a href="https://www.brendanlong.com/the-structure-of-an-mpeg-dash-mpd.html">www.brendanlong.com</a>
|
||||||
*/
|
*/
|
||||||
public static ParserResult getStreams(final StreamInfo streamInfo)
|
public static ParserResult getStreams(final StreamInfo streamInfo)
|
||||||
throws DashMpdParsingException, ReCaptchaException {
|
throws DashMpdParsingException, ReCaptchaException {
|
||||||
@ -160,7 +160,7 @@ public class DashMpdParser {
|
|||||||
final MediaFormat mediaFormat = MediaFormat.getFromMimeType(mimeType);
|
final MediaFormat mediaFormat = MediaFormat.getFromMimeType(mimeType);
|
||||||
|
|
||||||
if (itag.itagType.equals(ItagItem.ItagType.AUDIO)) {
|
if (itag.itagType.equals(ItagItem.ItagType.AUDIO)) {
|
||||||
if(segmentationList == null) {
|
if (segmentationList == null) {
|
||||||
final AudioStream audioStream = new AudioStream(url, mediaFormat, itag.avgBitrate);
|
final AudioStream audioStream = new AudioStream(url, mediaFormat, itag.avgBitrate);
|
||||||
if (!Stream.containSimilarStream(audioStream, streamInfo.getAudioStreams())) {
|
if (!Stream.containSimilarStream(audioStream, streamInfo.getAudioStreams())) {
|
||||||
audioStreams.add(audioStream);
|
audioStreams.add(audioStream);
|
||||||
@ -172,7 +172,7 @@ public class DashMpdParser {
|
|||||||
} else {
|
} else {
|
||||||
boolean isVideoOnly = itag.itagType.equals(ItagItem.ItagType.VIDEO_ONLY);
|
boolean isVideoOnly = itag.itagType.equals(ItagItem.ItagType.VIDEO_ONLY);
|
||||||
|
|
||||||
if(segmentationList == null) {
|
if (segmentationList == null) {
|
||||||
final VideoStream videoStream = new VideoStream(url,
|
final VideoStream videoStream = new VideoStream(url,
|
||||||
mediaFormat,
|
mediaFormat,
|
||||||
itag.resolutionString,
|
itag.resolutionString,
|
||||||
@ -191,7 +191,7 @@ public class DashMpdParser {
|
|||||||
itag.resolutionString,
|
itag.resolutionString,
|
||||||
isVideoOnly);
|
isVideoOnly);
|
||||||
|
|
||||||
if(isVideoOnly) {
|
if (isVideoOnly) {
|
||||||
segmentedVideoOnlyStreams.add(videoStream);
|
segmentedVideoOnlyStreams.add(videoStream);
|
||||||
} else {
|
} else {
|
||||||
segmentedVideoStreams.add(videoStream);
|
segmentedVideoStreams.add(videoStream);
|
||||||
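The DashMpdParser hunks above only add a parsed stream when no stream with equal stats is already in the respective list (Stream.containSimilarStream / Stream.equalStats). A minimal sketch of that de-duplication step; the AudioStream stand-in and the comparison below are simplifications, not the project's real classes:

import java.util.ArrayList;
import java.util.List;

public final class StreamDedupSketch {

    // Minimal stand-in for the real AudioStream: format name plus average bitrate.
    static final class AudioStream {
        final String format;
        final int averageBitrate;

        AudioStream(final String format, final int averageBitrate) {
            this.format = format;
            this.averageBitrate = averageBitrate;
        }

        boolean equalStats(final AudioStream other) {
            return format.equals(other.format) && averageBitrate == other.averageBitrate;
        }
    }

    static boolean containsSimilarStream(final AudioStream candidate, final List<AudioStream> existing) {
        for (final AudioStream stream : existing) {
            if (stream.equalStats(candidate)) return true;
        }
        return false;
    }

    public static void main(final String[] args) {
        final List<AudioStream> audioStreams = new ArrayList<>();
        final AudioStream m4a = new AudioStream("M4A", 128);
        if (!containsSimilarStream(m4a, audioStreams)) {
            audioStreams.add(m4a); // added: the list held nothing similar yet
        }
        System.out.println(audioStreams.size()); // 1
    }
}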
|
@ -5,7 +5,6 @@ import org.schabi.newpipe.extractor.InfoItem;
|
|||||||
import org.schabi.newpipe.extractor.InfoItemsCollector;
|
import org.schabi.newpipe.extractor.InfoItemsCollector;
|
||||||
import org.schabi.newpipe.extractor.ListExtractor;
|
import org.schabi.newpipe.extractor.ListExtractor;
|
||||||
import org.schabi.newpipe.extractor.ListExtractor.InfoItemsPage;
|
import org.schabi.newpipe.extractor.ListExtractor.InfoItemsPage;
|
||||||
import org.schabi.newpipe.extractor.comments.CommentsInfo;
|
|
||||||
import org.schabi.newpipe.extractor.stream.StreamExtractor;
|
import org.schabi.newpipe.extractor.stream.StreamExtractor;
|
||||||
import org.schabi.newpipe.extractor.stream.StreamInfo;
|
import org.schabi.newpipe.extractor.stream.StreamInfo;
|
||||||
|
|
||||||
@ -31,7 +30,7 @@ public class ExtractorHelper {
|
|||||||
public static List<InfoItem> getRelatedVideosOrLogError(StreamInfo info, StreamExtractor extractor) {
|
public static List<InfoItem> getRelatedVideosOrLogError(StreamInfo info, StreamExtractor extractor) {
|
||||||
try {
|
try {
|
||||||
InfoItemsCollector<? extends InfoItem, ?> collector = extractor.getRelatedStreams();
|
InfoItemsCollector<? extends InfoItem, ?> collector = extractor.getRelatedStreams();
|
||||||
if(collector == null) return Collections.emptyList();
|
if (collector == null) return Collections.emptyList();
|
||||||
info.addAllErrors(collector.getErrors());
|
info.addAllErrors(collector.getErrors());
|
||||||
|
|
||||||
//noinspection unchecked
|
//noinspection unchecked
|
||||||
|
@ -1,16 +1,14 @@
|
|||||||
package org.schabi.newpipe.extractor.utils;
|
package org.schabi.newpipe.extractor.utils;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.Arrays;
|
|
||||||
import java.util.List;
|
|
||||||
|
|
||||||
import javax.annotation.Nonnull;
|
|
||||||
import javax.annotation.Nullable;
|
|
||||||
|
|
||||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
|
||||||
|
|
||||||
import com.grack.nanojson.JsonArray;
|
import com.grack.nanojson.JsonArray;
|
||||||
import com.grack.nanojson.JsonObject;
|
import com.grack.nanojson.JsonObject;
|
||||||
|
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||||
|
|
||||||
|
import javax.annotation.Nonnull;
|
||||||
|
import javax.annotation.Nullable;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.Arrays;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
public class JsonUtils {
|
public class JsonUtils {
|
||||||
|
|
||||||
|
@ -1,5 +1,10 @@
|
|||||||
package org.schabi.newpipe.extractor.utils;
|
package org.schabi.newpipe.extractor.utils;
|
||||||
|
|
||||||
|
import org.nibor.autolink.LinkExtractor;
|
||||||
|
import org.nibor.autolink.LinkSpan;
|
||||||
|
import org.nibor.autolink.LinkType;
|
||||||
|
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||||
|
|
||||||
import java.io.UnsupportedEncodingException;
|
import java.io.UnsupportedEncodingException;
|
||||||
import java.net.URLDecoder;
|
import java.net.URLDecoder;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
@ -9,11 +14,6 @@ import java.util.Map;
|
|||||||
import java.util.regex.Matcher;
|
import java.util.regex.Matcher;
|
||||||
import java.util.regex.Pattern;
|
import java.util.regex.Pattern;
|
||||||
|
|
||||||
import org.nibor.autolink.LinkExtractor;
|
|
||||||
import org.nibor.autolink.LinkSpan;
|
|
||||||
import org.nibor.autolink.LinkType;
|
|
||||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Created by Christian Schabesberger on 02.02.16.
|
* Created by Christian Schabesberger on 02.02.16.
|
||||||
*
|
*
|
||||||
@ -102,7 +102,7 @@ public class Parser {
|
|||||||
.linkTypes(EnumSet.of(LinkType.URL, LinkType.WWW))
|
.linkTypes(EnumSet.of(LinkType.URL, LinkType.WWW))
|
||||||
.build();
|
.build();
|
||||||
Iterable<LinkSpan> linkss = linkExtractor.extractLinks(txt);
|
Iterable<LinkSpan> linkss = linkExtractor.extractLinks(txt);
|
||||||
for(LinkSpan ls : linkss) {
|
for (LinkSpan ls : linkss) {
|
||||||
links.add(txt.substring(ls.getBeginIndex(), ls.getEndIndex()));
|
links.add(txt.substring(ls.getBeginIndex(), ls.getEndIndex()));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1,13 +1,13 @@
|
|||||||
package org.schabi.newpipe.extractor.utils;
|
package org.schabi.newpipe.extractor.utils;
|
||||||
|
|
||||||
|
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||||
|
|
||||||
import java.io.UnsupportedEncodingException;
|
import java.io.UnsupportedEncodingException;
|
||||||
import java.net.MalformedURLException;
|
import java.net.MalformedURLException;
|
||||||
import java.net.URL;
|
import java.net.URL;
|
||||||
import java.net.URLDecoder;
|
import java.net.URLDecoder;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
|
||||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
|
||||||
|
|
||||||
public class Utils {
|
public class Utils {
|
||||||
|
|
||||||
private Utils() {
|
private Utils() {
|
||||||
@ -35,6 +35,7 @@ public class Utils {
|
|||||||
* <li>1.23K -> 1230</li>
|
* <li>1.23K -> 1230</li>
|
||||||
* <li>1.23M -> 1230000</li>
|
* <li>1.23M -> 1230000</li>
|
||||||
* </ul>
|
* </ul>
|
||||||
|
*
|
||||||
* @param numberWord string to be converted to a long
|
* @param numberWord string to be converted to a long
|
||||||
* @return a long
|
* @return a long
|
||||||
* @throws NumberFormatException
|
* @throws NumberFormatException
|
||||||
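The Utils javadoc above documents conversions such as "1.23K" -> 1230 and "1.23M" -> 1230000 but the implementation is not shown in this diff. A sketch of that conversion; the method name, the supported suffixes and the error handling are assumptions for the example and the real Utils may differ:

public final class NumberWordSketch {
    private NumberWordSketch() {
    }

    // Converts strings such as "1.23K" or "1.23M" to a long, as documented above.
    public static long mixedNumberWordToLong(final String numberWord) {
        final String trimmed = numberWord.trim();
        final char suffix = Character.toUpperCase(trimmed.charAt(trimmed.length() - 1));
        final double multiplier;
        final String digits;
        switch (suffix) {
            case 'K': multiplier = 1_000;         digits = trimmed.substring(0, trimmed.length() - 1); break;
            case 'M': multiplier = 1_000_000;     digits = trimmed.substring(0, trimmed.length() - 1); break;
            case 'B': multiplier = 1_000_000_000; digits = trimmed.substring(0, trimmed.length() - 1); break;
            default:  multiplier = 1;             digits = trimmed; break;
        }
        return Math.round(Double.parseDouble(digits) * multiplier);
    }

    public static void main(final String[] args) {
        System.out.println(mixedNumberWordToLong("1.23K")); // 1230
        System.out.println(mixedNumberWordToLong("1.23M")); // 1230000
    }
}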
@ -171,7 +172,7 @@ public class Utils {
|
|||||||
s = s.substring(1);
|
s = s.substring(1);
|
||||||
}
|
}
|
||||||
if (s.endsWith("\uFEFF")) {
|
if (s.endsWith("\uFEFF")) {
|
||||||
s = s.substring(0, s.length()-1);
|
s = s.substring(0, s.length() - 1);
|
||||||
}
|
}
|
||||||
return s;
|
return s;
|
||||||
}
|
}
|
||||||
|
@ -45,6 +45,6 @@ public class MediaCCCConferenceExtractorTest {
 
 @Test
 public void testGetInitalPage() throws Exception {
-assertEquals(97,extractor.getInitialPage().getItems().size());
+assertEquals(97, extractor.getInitialPage().getItems().size());
 }
 }
@ -49,8 +49,8 @@ public class MediaCCCConferenceListExtractorTest {
 }
 
 private boolean contains(List<InfoItem> itemList, String name) {
-for(InfoItem item : itemList) {
+for (InfoItem item : itemList) {
-if(item.getName().equals(name))
+if (item.getName().equals(name))
 return true;
 }
 return false;
@ -33,7 +33,7 @@ public class MediaCCCOggTest {
 
 @Test
 public void getAudioStreamsContainOgg() throws Exception {
-for(AudioStream stream : extractor.getAudioStreams()) {
+for (AudioStream stream : extractor.getAudioStreams()) {
 assertEquals("OGG", stream.getFormat().toString());
 }
 }
@ -28,7 +28,7 @@ public class MediaCCCSearchExtractorAllTest {
 @BeforeClass
 public static void setUpClass() throws Exception {
 NewPipe.init(DownloaderTestImpl.getInstance());
-extractor = MediaCCC.getSearchExtractor( new MediaCCCSearchQueryHandlerFactory()
+extractor = MediaCCC.getSearchExtractor(new MediaCCCSearchQueryHandlerFactory()
 .fromQuery("c3", Arrays.asList(new String[0]), ""));
 extractor.fetchPage();
 itemsPage = extractor.getInitialPage();
@ -37,8 +37,8 @@ public class MediaCCCSearchExtractorAllTest {
 @Test
 public void testIfChannelInfoItemsAvailable() {
 boolean isAvialable = false;
-for(InfoItem item : itemsPage.getItems()) {
+for (InfoItem item : itemsPage.getItems()) {
-if(item instanceof ChannelInfoItem) {
+if (item instanceof ChannelInfoItem) {
 isAvialable = true;
 }
 }
@ -48,8 +48,8 @@ public class MediaCCCSearchExtractorAllTest {
 @Test
 public void testIfStreamInfoitemsAvailable() {
 boolean isAvialable = false;
-for(InfoItem item : itemsPage.getItems()) {
+for (InfoItem item : itemsPage.getItems()) {
-if(item instanceof StreamInfoItem) {
+if (item instanceof StreamInfoItem) {
 isAvialable = true;
 }
 }
@ -27,7 +27,7 @@ public class MediaCCCSearchExtractorConferencesTest {
 @BeforeClass
 public static void setUpClass() throws Exception {
 NewPipe.init(DownloaderTestImpl.getInstance());
-extractor = MediaCCC.getSearchExtractor( new MediaCCCSearchQueryHandlerFactory()
+extractor = MediaCCC.getSearchExtractor(new MediaCCCSearchQueryHandlerFactory()
 .fromQuery("c3", Arrays.asList(new String[]{"conferences"}), ""));
 extractor.fetchPage();
 itemsPage = extractor.getInitialPage();
@ -35,7 +35,7 @@ public class MediaCCCSearchExtractorConferencesTest {
 
 @Test
 public void testReturnTypeChannel() {
-for(InfoItem item : itemsPage.getItems()) {
+for (InfoItem item : itemsPage.getItems()) {
 assertTrue("Item is not of type channel", item instanceof ChannelInfoItem);
 }
 }
@ -28,7 +28,7 @@ public class MediaCCCSearchExtractorEventsTest {
 @BeforeClass
 public static void setUpClass() throws Exception {
 NewPipe.init(DownloaderTestImpl.getInstance());
-extractor = MediaCCC.getSearchExtractor( new MediaCCCSearchQueryHandlerFactory()
+extractor = MediaCCC.getSearchExtractor(new MediaCCCSearchQueryHandlerFactory()
 .fromQuery("linux", Arrays.asList(new String[]{"events"}), ""));
 extractor.fetchPage();
 itemsPage = extractor.getInitialPage();
@ -65,7 +65,7 @@ public class MediaCCCSearchExtractorEventsTest {
 
 @Test
 public void testReturnTypeStream() throws Exception {
-for(InfoItem item : itemsPage.getItems()) {
+for (InfoItem item : itemsPage.getItems()) {
 assertTrue("Item is not of type StreamInfoItem", item instanceof StreamInfoItem);
 }
 }
@ -1,15 +1,5 @@
 package org.schabi.newpipe.extractor.services.peertube;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.schabi.newpipe.extractor.ExtractorAsserts.assertEmpty;
-import static org.schabi.newpipe.extractor.ExtractorAsserts.assertIsSecureUrl;
-import static org.schabi.newpipe.extractor.ServiceList.PeerTube;
-import static org.schabi.newpipe.extractor.services.DefaultTests.defaultTestGetPageInNewExtractor;
-import static org.schabi.newpipe.extractor.services.DefaultTests.defaultTestMoreItems;
-import static org.schabi.newpipe.extractor.services.DefaultTests.defaultTestRelatedItems;
-
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
@ -20,6 +10,12 @@ import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.services.BaseChannelExtractorTest;
 import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeChannelExtractor;
 
+import static org.junit.Assert.*;
+import static org.schabi.newpipe.extractor.ExtractorAsserts.assertEmpty;
+import static org.schabi.newpipe.extractor.ExtractorAsserts.assertIsSecureUrl;
+import static org.schabi.newpipe.extractor.ServiceList.PeerTube;
+import static org.schabi.newpipe.extractor.services.DefaultTests.*;
+
 /**
 * Test for {@link PeertubeChannelExtractor}
 */
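Most of the test-file hunks that follow apply the same import reorganization seen here: regular imports stay at the top, java.* imports follow the project and third-party ones, static imports move into a final block, and runs of single static imports from one class collapse into a wildcard. The snippet below sketches the resulting layout for a hypothetical test class; the grouping rules are an assumption about the formatter/IDE settings used and are not stated anywhere in this diff.

package org.schabi.newpipe.extractor.services.peertube;

// Regular imports: project and third-party classes first, then java.*.
import org.junit.BeforeClass;
import org.junit.Test;
import org.schabi.newpipe.DownloaderTestImpl;
import org.schabi.newpipe.extractor.NewPipe;

import java.io.IOException;
import java.util.List;

// Static imports are kept in a separate block at the very end,
// collapsing to a wildcard once several members of one class are used.
import static org.junit.Assert.*;
import static org.schabi.newpipe.extractor.ServiceList.PeerTube;

public class ImportLayoutSketch {
    // Body omitted; only the import layout is illustrated here.
}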
@ -1,8 +1,5 @@
 package org.schabi.newpipe.extractor.services.peertube;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.schabi.newpipe.DownloaderTestImpl;
@ -10,6 +7,9 @@ import org.schabi.newpipe.extractor.NewPipe;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeChannelLinkHandlerFactory;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 /**
 * Test for {@link PeertubeChannelLinkHandlerFactory}
 */
@ -1,12 +1,5 @@
 package org.schabi.newpipe.extractor.services.peertube;
 
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.schabi.newpipe.extractor.ServiceList.PeerTube;
-
-import java.io.IOException;
-import java.util.List;
-
 import org.jsoup.helper.StringUtil;
 import org.junit.BeforeClass;
 import org.junit.Test;
@ -18,6 +11,13 @@ import org.schabi.newpipe.extractor.comments.CommentsInfoItem;
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeCommentsExtractor;
 
+import java.io.IOException;
+import java.util.List;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.schabi.newpipe.extractor.ServiceList.PeerTube;
+
 public class PeertubeCommentsExtractorTest {
 
 private static PeertubeCommentsExtractor extractor;
@ -63,7 +63,7 @@ public class PeertubeCommentsExtractorTest {
 @Test
 public void testGetCommentsAllData() throws IOException, ExtractionException {
 InfoItemsPage<CommentsInfoItem> comments = extractor.getInitialPage();
-for(CommentsInfoItem c: comments.getItems()) {
+for (CommentsInfoItem c : comments.getItems()) {
 assertFalse(StringUtil.isBlank(c.getAuthorEndpoint()));
 assertFalse(StringUtil.isBlank(c.getAuthorName()));
 assertFalse(StringUtil.isBlank(c.getAuthorThumbnail()));
@ -82,8 +82,8 @@ public class PeertubeCommentsExtractorTest {
 }
 
 private boolean findInComments(List<CommentsInfoItem> comments, String comment) {
-for(CommentsInfoItem c: comments) {
+for (CommentsInfoItem c : comments) {
-if(c.getCommentText().contains(comment)) {
+if (c.getCommentText().contains(comment)) {
 return true;
 }
 }
@ -1,8 +1,5 @@
 package org.schabi.newpipe.extractor.services.peertube;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.schabi.newpipe.DownloaderTestImpl;
@ -10,6 +7,9 @@ import org.schabi.newpipe.extractor.NewPipe;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeCommentsLinkHandlerFactory;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 /**
 * Test for {@link PeertubeCommentsLinkHandlerFactory}
 */
@ -1,8 +1,5 @@
 package org.schabi.newpipe.extractor.services.peertube;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.schabi.newpipe.DownloaderTestImpl;
@ -10,6 +7,9 @@ import org.schabi.newpipe.extractor.NewPipe;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubePlaylistLinkHandlerFactory;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 /**
 * Test for {@link PeertubePlaylistLinkHandlerFactory}
 */
@ -1,19 +1,5 @@
 package org.schabi.newpipe.extractor.services.peertube;
 
-import static java.util.Objects.requireNonNull;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.schabi.newpipe.extractor.ExtractorAsserts.assertIsSecureUrl;
-import static org.schabi.newpipe.extractor.ServiceList.PeerTube;
-
-import java.io.IOException;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Locale;
-import java.util.TimeZone;
-
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
@ -26,6 +12,18 @@ import org.schabi.newpipe.extractor.stream.StreamExtractor;
 import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
 import org.schabi.newpipe.extractor.stream.StreamType;
 
+import java.io.IOException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Locale;
+import java.util.TimeZone;
+
+import static java.util.Objects.requireNonNull;
+import static org.junit.Assert.*;
+import static org.schabi.newpipe.extractor.ExtractorAsserts.assertIsSecureUrl;
+import static org.schabi.newpipe.extractor.ServiceList.PeerTube;
+
 /**
 * Test for {@link StreamExtractor}
 */
@ -1,8 +1,5 @@
 package org.schabi.newpipe.extractor.services.peertube;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.schabi.newpipe.DownloaderTestImpl;
@ -10,6 +7,9 @@ import org.schabi.newpipe.extractor.NewPipe;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeStreamLinkHandlerFactory;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 /**
 * Test for {@link PeertubeStreamLinkHandlerFactory}
 */
@ -1,13 +1,5 @@
 package org.schabi.newpipe.extractor.services.peertube;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.schabi.newpipe.extractor.ServiceList.PeerTube;
-
-import java.util.List;
-
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.schabi.newpipe.DownloaderTestImpl;
@ -17,6 +9,11 @@ import org.schabi.newpipe.extractor.kiosk.KioskExtractor;
 import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeTrendingExtractor;
 import org.schabi.newpipe.extractor.stream.StreamInfoItem;
 
+import java.util.List;
+
+import static org.junit.Assert.*;
+import static org.schabi.newpipe.extractor.ServiceList.PeerTube;
+
 /**
 * Test for {@link PeertubeTrendingExtractor}
 */
@ -53,10 +50,10 @@ public class PeertubeTrendingExtractorTest {
 @Test
 public void testGetStreams() throws Exception {
 ListExtractor.InfoItemsPage<StreamInfoItem> page = extractor.getInitialPage();
-if(!page.getErrors().isEmpty()) {
+if (!page.getErrors().isEmpty()) {
 System.err.println("----------");
 List<Throwable> errors = page.getErrors();
-for(Throwable e: errors) {
+for (Throwable e : errors) {
 e.printStackTrace();
 System.err.println("----------");
 }
@ -1,9 +1,5 @@
 package org.schabi.newpipe.extractor.services.peertube;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.schabi.newpipe.extractor.ServiceList.PeerTube;
-
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.schabi.newpipe.DownloaderTestImpl;
@ -12,6 +8,10 @@ import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.linkhandler.LinkHandlerFactory;
 import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeTrendingLinkHandlerFactory;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.schabi.newpipe.extractor.ServiceList.PeerTube;
+
 /**
 * Test for {@link PeertubeTrendingLinkHandlerFactory}
 */
@ -1,12 +1,12 @@
 package org.schabi.newpipe.extractor.services.peertube.search;
 
-import static org.junit.Assert.assertTrue;
-
 import org.junit.Test;
 import org.schabi.newpipe.extractor.InfoItem;
 import org.schabi.newpipe.extractor.ListExtractor;
 import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeSearchExtractor;
 
+import static org.junit.Assert.assertTrue;
+
 /**
 * Test for {@link PeertubeSearchExtractor}
 */
@ -1,10 +1,5 @@
 package org.schabi.newpipe.extractor.services.peertube.search;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.schabi.newpipe.extractor.ServiceList.PeerTube;
-
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.schabi.newpipe.DownloaderTestImpl;
@ -15,6 +10,9 @@ import org.schabi.newpipe.extractor.services.peertube.PeertubeInstance;
 import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeSearchExtractor;
 import org.schabi.newpipe.extractor.stream.StreamInfoItem;
 
+import static org.junit.Assert.*;
+import static org.schabi.newpipe.extractor.ServiceList.PeerTube;
+
 /**
 * Test for {@link PeertubeSearchExtractor}
 */
@ -45,8 +43,8 @@ public class PeertubeSearchExtractorDefaultTest extends PeertubeSearchExtractorB
 @Test
 public void testResultListCheckIfContainsStreamItems() {
 boolean hasStreams = false;
-for(InfoItem item : itemsPage.getItems()) {
+for (InfoItem item : itemsPage.getItems()) {
-if(item instanceof StreamInfoItem) {
+if (item instanceof StreamInfoItem) {
 hasStreams = true;
 }
 }
@ -67,7 +65,7 @@ public class PeertubeSearchExtractorDefaultTest extends PeertubeSearchExtractorB
 boolean equals = true;
 for (int i = 0; i < secondPage.getItems().size()
 && i < itemsPage.getItems().size(); i++) {
-if(!secondPage.getItems().get(i).getUrl().equals(
+if (!secondPage.getItems().get(i).getUrl().equals(
 itemsPage.getItems().get(i).getUrl())) {
 equals = false;
 }
@ -1,12 +1,12 @@
 package org.schabi.newpipe.extractor.services.peertube.search;
 
-import static org.junit.Assert.assertEquals;
-import static org.schabi.newpipe.extractor.ServiceList.PeerTube;
-
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.schabi.newpipe.extractor.services.peertube.PeertubeInstance;
 
+import static org.junit.Assert.assertEquals;
+import static org.schabi.newpipe.extractor.ServiceList.PeerTube;
+
 public class PeertubeSearchQHTest {
 
 @BeforeClass
@ -18,7 +18,7 @@ public class PeertubeSearchQHTest {
 @Test
 public void testRegularValues() throws Exception {
 assertEquals("https://peertube.mastodon.host/api/v1/search/videos?search=asdf", PeerTube.getSearchQHFactory().fromQuery("asdf").getUrl());
-assertEquals("https://peertube.mastodon.host/api/v1/search/videos?search=hans",PeerTube.getSearchQHFactory().fromQuery("hans").getUrl());
+assertEquals("https://peertube.mastodon.host/api/v1/search/videos?search=hans", PeerTube.getSearchQHFactory().fromQuery("hans").getUrl());
 assertEquals("https://peertube.mastodon.host/api/v1/search/videos?search=Poifj%26jaijf", PeerTube.getSearchQHFactory().fromQuery("Poifj&jaijf").getUrl());
 assertEquals("https://peertube.mastodon.host/api/v1/search/videos?search=G%C3%BCl%C3%BCm", PeerTube.getSearchQHFactory().fromQuery("Gülüm").getUrl());
 assertEquals("https://peertube.mastodon.host/api/v1/search/videos?search=%3Fj%24%29H%C2%A7B", PeerTube.getSearchQHFactory().fromQuery("?j$)H§B").getUrl());
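The expected URLs in this test encode the query with UTF-8 percent-encoding ("&" becomes %26, "ü" becomes %C3%BC, "§" becomes %C2%A7). The sketch below shows how such a search URL can be built with the standard library; the base URL is copied from the assertions above, the helper name is made up, and the real link handler factory may do more than this.

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

public final class SearchUrlSketch {
    private static final String BASE = "https://peertube.mastodon.host/api/v1/search/videos?search=";

    private SearchUrlSketch() {
    }

    // Hypothetical helper: percent-encodes the query and appends it to the search endpoint.
    static String buildSearchUrl(final String query) throws UnsupportedEncodingException {
        // URLEncoder form-encodes spaces as '+', so replace them to get plain percent-encoding.
        return BASE + URLEncoder.encode(query, "UTF-8").replace("+", "%20");
    }

    public static void main(String[] args) throws UnsupportedEncodingException {
        System.out.println(buildSearchUrl("Poifj&jaijf")); // ...search=Poifj%26jaijf
        System.out.println(buildSearchUrl("Gülüm"));       // ...search=G%C3%BCl%C3%BCm
        System.out.println(buildSearchUrl("?j$)H§B"));     // ...search=%3Fj%24%29H%C2%A7B
    }
}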
@ -1,7 +1,6 @@
 package org.schabi.newpipe.extractor.services.soundcloud;
 
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.schabi.newpipe.DownloaderTestImpl;
 import org.schabi.newpipe.extractor.ListExtractor;
@ -48,10 +47,10 @@ public class SoundcloudChartsExtractorTest {
 @Test
 public void testGetStreams() throws Exception {
 ListExtractor.InfoItemsPage<StreamInfoItem> page = extractor.getInitialPage();
-if(!page.getErrors().isEmpty()) {
+if (!page.getErrors().isEmpty()) {
 System.err.println("----------");
 List<Throwable> errors = page.getErrors();
-for(Throwable e: errors) {
+for (Throwable e : errors) {
 e.printStackTrace();
 System.err.println("----------");
 }
@ -1,10 +1,12 @@
 package org.schabi.newpipe.extractor.services.soundcloud;
 
-import org.junit.*;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
 import org.schabi.newpipe.DownloaderTestImpl;
 import org.schabi.newpipe.extractor.NewPipe;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertTrue;
 
 public class SoundcloudParsingHelperTest {
 @BeforeClass
@ -38,7 +38,7 @@ public class SoundcloudSearchExtractorChannelOnlyTest extends SoundcloudSearchEx
 boolean equals = true;
 for (int i = 0; i < secondPage.getItems().size()
 && i < itemsPage.getItems().size(); i++) {
-if(!secondPage.getItems().get(i).getUrl().equals(
+if (!secondPage.getItems().get(i).getUrl().equals(
 itemsPage.getItems().get(i).getUrl())) {
 equals = false;
 }
@ -57,8 +57,8 @@ public class SoundcloudSearchExtractorChannelOnlyTest extends SoundcloudSearchEx
 
 @Test
 public void testOnlyContainChannels() {
-for(InfoItem item : itemsPage.getItems()) {
+for (InfoItem item : itemsPage.getItems()) {
-if(!(item instanceof ChannelInfoItem)) {
+if (!(item instanceof ChannelInfoItem)) {
 fail("The following item is no channel item: " + item.toString());
 }
 }
@ -60,8 +60,8 @@ public class SoundcloudSearchExtractorDefaultTest extends SoundcloudSearchExtrac
 @Test
 public void testResultListCheckIfContainsStreamItems() {
 boolean hasStreams = false;
-for(InfoItem item : itemsPage.getItems()) {
+for (InfoItem item : itemsPage.getItems()) {
-if(item instanceof StreamInfoItem) {
+if (item instanceof StreamInfoItem) {
 hasStreams = true;
 }
 }
@ -80,7 +80,7 @@ public class SoundcloudSearchExtractorDefaultTest extends SoundcloudSearchExtrac
 boolean equals = true;
 for (int i = 0; i < secondPage.getItems().size()
 && i < itemsPage.getItems().size(); i++) {
-if(!secondPage.getItems().get(i).getUrl().equals(
+if (!secondPage.getItems().get(i).getUrl().equals(
 itemsPage.getItems().get(i).getUrl())) {
 equals = false;
 }
@ -1,14 +1,5 @@
 package org.schabi.newpipe.extractor.services.youtube;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-import static org.schabi.newpipe.extractor.ExtractorAsserts.assertIsSecureUrl;
-import static org.schabi.newpipe.extractor.ServiceList.YouTube;
-import static org.schabi.newpipe.extractor.services.DefaultTests.defaultTestGetPageInNewExtractor;
-import static org.schabi.newpipe.extractor.services.DefaultTests.defaultTestMoreItems;
-import static org.schabi.newpipe.extractor.services.DefaultTests.defaultTestRelatedItems;
-
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.schabi.newpipe.DownloaderTestImpl;
@ -16,10 +7,14 @@ import org.schabi.newpipe.extractor.NewPipe;
 import org.schabi.newpipe.extractor.ServiceList;
 import org.schabi.newpipe.extractor.channel.ChannelExtractor;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
-import org.schabi.newpipe.extractor.localization.Localization;
 import org.schabi.newpipe.extractor.services.BaseChannelExtractorTest;
 import org.schabi.newpipe.extractor.services.youtube.extractors.YoutubeChannelExtractor;
 
+import static org.junit.Assert.*;
+import static org.schabi.newpipe.extractor.ExtractorAsserts.assertIsSecureUrl;
+import static org.schabi.newpipe.extractor.ServiceList.YouTube;
+import static org.schabi.newpipe.extractor.services.DefaultTests.*;
+
 /**
 * Test for {@link ChannelExtractor}
 */
@ -489,7 +484,6 @@ public class YoutubeChannelExtractorTest {
 }
 
 
-
 public static class RandomChannel implements BaseChannelExtractorTest {
 private static YoutubeChannelExtractor extractor;
 
Some files were not shown because too many files have changed in this diff.