Mirror of https://github.com/TeamNewPipe/NewPipeExtractor.git (synced 2025-04-27 15:30:34 +05:30)

Merge 'origin/dev' into dev
This commit is contained in: commit 5c0a03328a

.github/PULL_REQUEST_TEMPLATE.md (vendored, 4 changed lines)
@ -1,3 +1,3 @@
- [ ] I carefully read the [contribution guidelines](https://github.com/TeamNewPipe/NewPipe/blob/HEAD/.github/CONTRIBUTING.md) and agree to them.
- [ ] I did test the API against [NewPipe](https://github.com/TeamNewPipe/NewPipe).
- [ ] I agree to ASAP create a PULL request for [NewPipe](https://github.com/TeamNewPipe/NewPipe) for making in compatible when I changed the api.
- [ ] I have tested the API against [NewPipe](https://github.com/TeamNewPipe/NewPipe).
- [ ] I agree to create a pull request for [NewPipe](https://github.com/TeamNewPipe/NewPipe) as soon as possible to make it compatible with the changed API.
README.md (16 changed lines)

@ -1,6 +1,6 @@
# NewPipe Extractor

[](https://travis-ci.org/TeamNewPipe/NewPipeExtractor) [](https://jitpack.io/#TeamNewPipe/NewPipeExtractor) [Documentation](https://teamnewpipe.github.io/documentation/)
[](https://travis-ci.org/TeamNewPipe/NewPipeExtractor) [](https://jitpack.io/#TeamNewPipe/NewPipeExtractor) [JDoc](https://teamnewpipe.github.io/NewPipeExtractor/javadoc/) • [Documentation](https://teamnewpipe.github.io/documentation/)

NewPipe Extractor is a library for extracting things from streaming sites. It is a core component of [NewPipe](https://github.com/TeamNewPipe/NewPipe), but could be used independently.
@ -11,11 +11,21 @@ NewPipe Extractor is available at JitPack's Maven repo.

If you're using Gradle, you could add NewPipe Extractor as a dependency with the following steps:

1. Add `maven { url 'https://jitpack.io' }` to the `repositories` in your `build.gradle`.
2. Add `compile 'com.github.TeamNewPipe:NewPipeExtractor:v0.11.0'`the `dependencies` in your `build.gradle`. Replace `v0.11.0` with the latest release.
2. Add `implementation 'com.github.TeamNewPipe:NewPipeExtractor:v0.19.0'` to the `dependencies` in your `build.gradle`. Replace `v0.19.0` with the latest release.

### Testing changes

To test changes quickly you can build the library locally. Using the local Maven repository is a good approach, here's a gist of how to use it:
To test changes quickly you can build the library locally. A good approach would be to add something like the following to your `settings.gradle`:

```groovy
includeBuild('../NewPipeExtractor') {
    dependencySubstitution {
        substitute module('com.github.TeamNewPipe:NewPipeExtractor') with project(':extractor')
    }
}
```

Another approach would be to use the local Maven repository; here's a gist of how to use it:

1. Add `mavenLocal()` in your project `repositories` list (usually as the first entry to give it priority over the others).
2. It's _recommended_ that you change the `version` of this library (e.g. to `LOCAL_SNAPSHOT`).
@ -5,7 +5,7 @@ allprojects {
sourceCompatibility = 1.7
targetCompatibility = 1.7

version 'v0.18.6'
version 'v0.19.0'
group 'com.github.TeamNewPipe'

repositories {
@ -3,16 +3,33 @@ package org.schabi.newpipe.extractor;
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;

import javax.annotation.Nonnull;
import java.io.IOException;
import java.util.Collections;
import java.util.List;

import javax.annotation.Nonnull;

/**
 * Base class for extractors that have a list (e.g. playlists, users).
 */
public abstract class ListExtractor<R extends InfoItem> extends Extractor {

    /**
     * Constant that should be returned whenever
     * a list has an unknown number of items.
     */
    public static final long ITEM_COUNT_UNKNOWN = -1;
    /**
     * Constant that should be returned whenever a list has an
     * infinite number of items. For example a YouTube mix.
     */
    public static final long ITEM_COUNT_INFINITE = -2;
    /**
     * Constant that should be returned whenever a list
     * has an unknown number of items bigger than 100.
     */
    public static final long ITEM_COUNT_MORE_THAN_100 = -3;

    public ListExtractor(StreamingService service, ListLinkHandler linkHandler) {
        super(service, linkHandler);
    }
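
The three `ITEM_COUNT_*` sentinels above are plain `long` values, so callers have to compare against them explicitly. A minimal sketch of how client code might turn such a count into a label (the helper class and its wording are illustrative, not part of the library):

```java
// Illustrative helper, not part of this commit: maps the sentinel values to display text.
import org.schabi.newpipe.extractor.ListExtractor;

final class ItemCountFormatter {
    static String describe(long streamCount) {
        if (streamCount == ListExtractor.ITEM_COUNT_UNKNOWN) {
            return "unknown number of items";
        } else if (streamCount == ListExtractor.ITEM_COUNT_INFINITE) {
            return "endless list (e.g. a YouTube mix)";
        } else if (streamCount == ListExtractor.ITEM_COUNT_MORE_THAN_100) {
            return "more than 100 items";
        } else {
            return streamCount + " items";
        }
    }
}
```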
@ -0,0 +1,11 @@
package org.schabi.newpipe.extractor.exceptions;

public class ContentNotSupportedException extends ParsingException {
    public ContentNotSupportedException(String message) {
        super(message);
    }

    public ContentNotSupportedException(String message, Throwable cause) {
        super(message, cause);
    }
}
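
Later hunks in this commit throw this exception from `SoundcloudStreamExtractor` (HLS-only tracks) and `YoutubeChannelExtractor` (channels without a Videos tab). A minimal, hypothetical sketch of how a caller might treat it separately from other extraction failures (it assumes the extractor's page has already been fetched):

```java
// Hypothetical caller-side handling; not part of this commit.
import org.schabi.newpipe.extractor.exceptions.ContentNotSupportedException;
import org.schabi.newpipe.extractor.stream.StreamExtractor;

class AudioAvailabilityCheck {
    static boolean hasPlayableAudio(StreamExtractor extractor) {
        try {
            return !extractor.getAudioStreams().isEmpty();
        } catch (ContentNotSupportedException e) {
            return false; // e.g. HLS-only SoundCloud tracks (see the SoundcloudStreamExtractor hunk below)
        } catch (Exception e) {
            return false; // other extraction problems; real code would surface these to the user
        }
    }
}
```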
|
@ -11,6 +11,10 @@ import org.schabi.newpipe.extractor.playlist.PlaylistInfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemExtractor;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/*
|
||||
* Created by Christian Schabesberger on 12.02.17.
|
||||
*
|
||||
@ -48,13 +52,31 @@ public class InfoItemsSearchCollector extends InfoItemsCollector<InfoItem, InfoI
|
||||
private final ChannelInfoItemsCollector userCollector;
|
||||
private final PlaylistInfoItemsCollector playlistCollector;
|
||||
|
||||
InfoItemsSearchCollector(int serviceId) {
|
||||
public InfoItemsSearchCollector(int serviceId) {
|
||||
super(serviceId);
|
||||
streamCollector = new StreamInfoItemsCollector(serviceId);
|
||||
userCollector = new ChannelInfoItemsCollector(serviceId);
|
||||
playlistCollector = new PlaylistInfoItemsCollector(serviceId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Throwable> getErrors() {
|
||||
final List<Throwable> errors = new ArrayList<>(super.getErrors());
|
||||
errors.addAll(streamCollector.getErrors());
|
||||
errors.addAll(userCollector.getErrors());
|
||||
errors.addAll(playlistCollector.getErrors());
|
||||
|
||||
return Collections.unmodifiableList(errors);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void reset() {
|
||||
super.reset();
|
||||
streamCollector.reset();
|
||||
userCollector.reset();
|
||||
playlistCollector.reset();
|
||||
}
|
||||
|
||||
@Override
|
||||
public InfoItem extract(InfoItemExtractor extractor) throws ParsingException {
|
||||
// Use the corresponding collector for each item extractor type
|
||||
|
@ -17,11 +17,8 @@ public abstract class SearchExtractor extends ListExtractor<InfoItem> {
|
||||
}
|
||||
}
|
||||
|
||||
private final InfoItemsSearchCollector collector;
|
||||
|
||||
public SearchExtractor(StreamingService service, SearchQueryHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
collector = new InfoItemsSearchCollector(service.getServiceId());
|
||||
}
|
||||
|
||||
public String getSearchString() {
|
||||
@ -30,10 +27,6 @@ public abstract class SearchExtractor extends ListExtractor<InfoItem> {
|
||||
|
||||
public abstract String getSearchSuggestion() throws ParsingException;
|
||||
|
||||
protected InfoItemsSearchCollector getInfoItemSearchCollector() {
|
||||
return collector;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchQueryHandler getLinkHandler() {
|
||||
return (SearchQueryHandler) super.getLinkHandler();
|
||||
|
@ -33,7 +33,7 @@ public class BandcampSearchExtractor extends SearchExtractor {
|
||||
// okay apparently this is where we DOWNLOAD the page and then COMMIT its ENTRIES to an INFOITEMPAGE
|
||||
String html = getDownloader().get(pageUrl).responseBody();
|
||||
|
||||
InfoItemsSearchCollector collector = getInfoItemSearchCollector();
|
||||
InfoItemsSearchCollector collector = new InfoItemsSearchCollector(getServiceId());
|
||||
|
||||
|
||||
Document d = Jsoup.parse(html);
|
||||
|
@ -47,8 +47,7 @@ public class MediaCCCSearchExtractor extends SearchExtractor {
|
||||
@Nonnull
|
||||
@Override
|
||||
public InfoItemsPage<InfoItem> getInitialPage() throws IOException, ExtractionException {
|
||||
InfoItemsSearchCollector searchItems = getInfoItemSearchCollector();
|
||||
searchItems.reset();
|
||||
final InfoItemsSearchCollector searchItems = new InfoItemsSearchCollector(getServiceId());
|
||||
|
||||
if (getLinkHandler().getContentFilters().contains(CONFERENCES)
|
||||
|| getLinkHandler().getContentFilters().contains(ALL)
|
||||
|
@ -77,7 +77,12 @@ public class PeertubeService extends StreamingService {
|
||||
@Override
|
||||
public ChannelExtractor getChannelExtractor(ListLinkHandler linkHandler)
|
||||
throws ExtractionException {
|
||||
return new PeertubeChannelExtractor(this, linkHandler);
|
||||
|
||||
if (linkHandler.getUrl().contains("/video-channels/")) {
|
||||
return new PeertubeChannelExtractor(this, linkHandler);
|
||||
} else {
|
||||
return new PeertubeAccountExtractor(this, linkHandler);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -0,0 +1,187 @@
|
||||
package org.schabi.newpipe.extractor.services.peertube.extractors;
|
||||
|
||||
import com.grack.nanojson.JsonArray;
|
||||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
import com.grack.nanojson.JsonParserException;
|
||||
import org.jsoup.helper.StringUtil;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelExtractor;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.downloader.Response;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.utils.JsonUtils;
|
||||
import org.schabi.newpipe.extractor.utils.Parser;
|
||||
import org.schabi.newpipe.extractor.utils.Parser.RegexException;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class PeertubeAccountExtractor extends ChannelExtractor {
|
||||
|
||||
private static final String START_KEY = "start";
|
||||
private static final String COUNT_KEY = "count";
|
||||
private static final int ITEMS_PER_PAGE = 12;
|
||||
private static final String START_PATTERN = "start=(\\d*)";
|
||||
|
||||
private InfoItemsPage<StreamInfoItem> initPage;
|
||||
private long total;
|
||||
|
||||
private JsonObject json;
|
||||
private final String baseUrl;
|
||||
|
||||
public PeertubeAccountExtractor(StreamingService service, ListLinkHandler linkHandler) throws ParsingException {
|
||||
super(service, linkHandler);
|
||||
this.baseUrl = getBaseUrl();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getAvatarUrl() throws ParsingException {
|
||||
String value;
|
||||
try {
|
||||
value = JsonUtils.getString(json, "avatar.path");
|
||||
} catch (Exception e) {
|
||||
value = "/client/assets/images/default-avatar.png";
|
||||
}
|
||||
return baseUrl + value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getBannerUrl() throws ParsingException {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getFeedUrl() throws ParsingException {
|
||||
return getBaseUrl() + "/feeds/videos.xml?accountId=" + json.get("id");
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getSubscriberCount() throws ParsingException {
|
||||
Number number = JsonUtils.getNumber(json, "followersCount");
|
||||
return number.longValue();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() throws ParsingException {
|
||||
try {
|
||||
return JsonUtils.getString(json, "description");
|
||||
} catch (ParsingException e) {
|
||||
return "No description";
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
|
||||
super.fetchPage();
|
||||
return initPage;
|
||||
}
|
||||
|
||||
private void collectStreamsFrom(StreamInfoItemsCollector collector, JsonObject json, String pageUrl) throws ParsingException {
|
||||
JsonArray contents;
|
||||
try {
|
||||
contents = (JsonArray) JsonUtils.getValue(json, "data");
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("unable to extract channel streams", e);
|
||||
}
|
||||
|
||||
for (Object c : contents) {
|
||||
if (c instanceof JsonObject) {
|
||||
final JsonObject item = (JsonObject) c;
|
||||
PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
|
||||
collector.commit(extractor);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getNextPageUrl() throws IOException, ExtractionException {
|
||||
super.fetchPage();
|
||||
return initPage.getNextPageUrl();
|
||||
}
|
||||
|
||||
@Override
|
||||
public InfoItemsPage<StreamInfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
|
||||
Response response = getDownloader().get(pageUrl);
|
||||
JsonObject json = null;
|
||||
if (null != response && !StringUtil.isBlank(response.responseBody())) {
|
||||
try {
|
||||
json = JsonParser.object().from(response.responseBody());
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not parse json data for kiosk info", e);
|
||||
}
|
||||
}
|
||||
|
||||
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
|
||||
if (json != null) {
|
||||
PeertubeParsingHelper.validate(json);
|
||||
Number number = JsonUtils.getNumber(json, "total");
|
||||
if (number != null) this.total = number.longValue();
|
||||
collectStreamsFrom(collector, json, pageUrl);
|
||||
} else {
|
||||
throw new ExtractionException("Unable to get PeerTube kiosk info");
|
||||
}
|
||||
return new InfoItemsPage<>(collector, getNextPageUrl(pageUrl));
|
||||
}
|
||||
|
||||
|
||||
private String getNextPageUrl(String prevPageUrl) {
|
||||
String prevStart;
|
||||
try {
|
||||
prevStart = Parser.matchGroup1(START_PATTERN, prevPageUrl);
|
||||
} catch (RegexException e) {
|
||||
return "";
|
||||
}
|
||||
if (StringUtil.isBlank(prevStart)) return "";
|
||||
long nextStart = 0;
|
||||
try {
|
||||
nextStart = Long.valueOf(prevStart) + ITEMS_PER_PAGE;
|
||||
} catch (NumberFormatException e) {
|
||||
return "";
|
||||
}
|
||||
|
||||
if (nextStart >= total) {
|
||||
return "";
|
||||
} else {
|
||||
return prevPageUrl.replace(START_KEY + "=" + prevStart, START_KEY + "=" + String.valueOf(nextStart));
|
||||
}
|
||||
}
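
The private helper above only rewrites the `start=` query parameter and stops once the next offset reaches the total. A standalone sketch of the same arithmetic; the instance URL and the total of 30 videos are made up, while the page size of 12 mirrors `ITEMS_PER_PAGE`:

```java
// Standalone illustration of the start/count paging used above (hypothetical URL and total).
import java.util.regex.Matcher;
import java.util.regex.Pattern;

class PeertubePagingDemo {
    public static void main(String[] args) {
        final int itemsPerPage = 12;  // mirrors ITEMS_PER_PAGE
        final long total = 30;        // pretend the account has 30 videos
        String prevPageUrl = "https://example.org/api/v1/accounts/demo/videos?start=12&count=12";

        Matcher m = Pattern.compile("start=(\\d*)").matcher(prevPageUrl);
        if (m.find()) {
            long nextStart = Long.parseLong(m.group(1)) + itemsPerPage;
            String next = nextStart >= total
                    ? "" // no further page
                    : prevPageUrl.replace("start=" + m.group(1), "start=" + nextStart);
            System.out.println(next); // .../videos?start=24&count=12
        }
    }
}
```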
|
||||
|
||||
@Override
|
||||
public void onFetchPage(Downloader downloader) throws IOException, ExtractionException {
|
||||
Response response = downloader.get(getUrl());
|
||||
if (null != response && null != response.responseBody()) {
|
||||
setInitialData(response.responseBody());
|
||||
} else {
|
||||
throw new ExtractionException("Unable to extract PeerTube channel data");
|
||||
}
|
||||
|
||||
String pageUrl = getUrl() + "/videos?" + START_KEY + "=0&" + COUNT_KEY + "=" + ITEMS_PER_PAGE;
|
||||
this.initPage = getPage(pageUrl);
|
||||
}
|
||||
|
||||
private void setInitialData(String responseBody) throws ExtractionException {
|
||||
try {
|
||||
json = JsonParser.object().from(responseBody);
|
||||
} catch (JsonParserException e) {
|
||||
throw new ExtractionException("Unable to extract peertube channel data", e);
|
||||
}
|
||||
if (json == null) throw new ExtractionException("Unable to extract PeerTube channel data");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() throws ParsingException {
|
||||
return JsonUtils.getString(json, "displayName");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getOriginalUrl() throws ParsingException {
|
||||
return baseUrl + "/" + getId();
|
||||
}
|
||||
|
||||
}
|
@ -57,7 +57,7 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
|
||||
|
||||
@Override
|
||||
public String getFeedUrl() throws ParsingException {
|
||||
return null;
|
||||
return getBaseUrl() + "/feeds/videos.xml?videoChannelId=" + json.get("id");
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -181,7 +181,7 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
|
||||
|
||||
@Override
|
||||
public String getOriginalUrl() throws ParsingException {
|
||||
return baseUrl + "/accounts/" + getId();
|
||||
return baseUrl + "/" + getId();
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -97,7 +97,7 @@ public class PeertubeCommentsInfoItemExtractor implements CommentsInfoItemExtrac
|
||||
public String getAuthorEndpoint() throws ParsingException {
|
||||
String name = JsonUtils.getString(item, "account.name");
|
||||
String host = JsonUtils.getString(item, "account.host");
|
||||
return ServiceList.PeerTube.getChannelLHFactory().fromId(name + "@" + host, baseUrl).getUrl();
|
||||
return ServiceList.PeerTube.getChannelLHFactory().fromId("accounts/" + name + "@" + host, baseUrl).getUrl();
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -47,8 +47,7 @@ public class PeertubeSearchExtractor extends SearchExtractor {
|
||||
}
|
||||
|
||||
private InfoItemsCollector<InfoItem, InfoItemExtractor> collectStreamsFrom(JsonObject json) throws ParsingException {
|
||||
|
||||
final InfoItemsSearchCollector collector = getInfoItemSearchCollector();
|
||||
final InfoItemsSearchCollector collector = new InfoItemsSearchCollector(getServiceId());
|
||||
|
||||
JsonArray contents;
|
||||
try {
|
||||
|
@ -128,7 +128,7 @@ public class PeertubeStreamExtractor extends StreamExtractor {
|
||||
public String getUploaderUrl() throws ParsingException {
|
||||
String name = JsonUtils.getString(json, "account.name");
|
||||
String host = JsonUtils.getString(json, "account.host");
|
||||
return getService().getChannelLHFactory().fromId(name + "@" + host, baseUrl).getUrl();
|
||||
return getService().getChannelLHFactory().fromId("accounts/" + name + "@" + host, baseUrl).getUrl();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -51,7 +51,8 @@ public class PeertubeStreamInfoItemExtractor implements StreamInfoItemExtractor
|
||||
public String getUploaderUrl() throws ParsingException {
|
||||
String name = JsonUtils.getString(item, "account.name");
|
||||
String host = JsonUtils.getString(item, "account.host");
|
||||
return ServiceList.PeerTube.getChannelLHFactory().fromId(name + "@" + host, baseUrl).getUrl();
|
||||
|
||||
return ServiceList.PeerTube.getChannelLHFactory().fromId("accounts/" + name + "@" + host, baseUrl).getUrl();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -10,8 +10,8 @@ import java.util.List;
|
||||
public class PeertubeChannelLinkHandlerFactory extends ListLinkHandlerFactory {
|
||||
|
||||
private static final PeertubeChannelLinkHandlerFactory instance = new PeertubeChannelLinkHandlerFactory();
|
||||
private static final String ID_PATTERN = "/accounts/([^/?&#]*)";
|
||||
private static final String ACCOUNTS_ENDPOINT = "/api/v1/accounts/";
|
||||
private static final String ID_PATTERN = "(accounts|video-channels)/([^/?&#]*)";
|
||||
private static final String API_ENDPOINT = "/api/v1/";
|
||||
|
||||
public static PeertubeChannelLinkHandlerFactory getInstance() {
|
||||
return instance;
|
||||
@ -19,7 +19,7 @@ public class PeertubeChannelLinkHandlerFactory extends ListLinkHandlerFactory {
|
||||
|
||||
@Override
|
||||
public String getId(String url) throws ParsingException {
|
||||
return Parser.matchGroup1(ID_PATTERN, url);
|
||||
return Parser.matchGroup(ID_PATTERN, url, 0);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -31,11 +31,17 @@ public class PeertubeChannelLinkHandlerFactory extends ListLinkHandlerFactory {
|
||||
@Override
|
||||
public String getUrl(String id, List<String> contentFilter, String sortFilter, String baseUrl)
|
||||
throws ParsingException {
|
||||
return baseUrl + ACCOUNTS_ENDPOINT + id;
|
||||
|
||||
if (id.matches(ID_PATTERN)) {
|
||||
return baseUrl + API_ENDPOINT + id;
|
||||
} else {
|
||||
// This is needed for compatibility with older versions where video channels were not supported yet
|
||||
return baseUrl + API_ENDPOINT + "accounts/" + id;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean onAcceptUrl(String url) {
|
||||
return url.contains("/accounts/");
|
||||
return url.contains("/accounts/") || url.contains("/video-channels/");
|
||||
}
|
||||
}
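
With the widened `ID_PATTERN`, both account and video-channel URLs are now accepted, and the whole `accounts/...` or `video-channels/...` match becomes the id that `getUrl` appends to `/api/v1/`. A regex-only sketch of that mapping with hypothetical instance URLs (it does not use the library's `Parser` helper):

```java
// Regex-only illustration of the new id extraction (hypothetical instance and URLs).
import java.util.regex.Matcher;
import java.util.regex.Pattern;

class PeertubeIdDemo {
    public static void main(String[] args) {
        Pattern idPattern = Pattern.compile("(accounts|video-channels)/([^/?&#]*)");
        String[] urls = {
                "https://peertube.example/accounts/alice",
                "https://peertube.example/video-channels/alices_channel/videos"
        };
        for (String url : urls) {
            Matcher m = idPattern.matcher(url);
            if (m.find()) {
                // group(0) is what getId() now returns, e.g. "accounts/alice";
                // the API URL then becomes baseUrl + "/api/v1/" + that id.
                System.out.println(m.group(0));
            }
        }
    }
}
```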
|
||||
|
@ -11,6 +11,8 @@ import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.schabi.newpipe.extractor.ServiceList.SoundCloud;
|
||||
|
||||
public class SoundcloudChartsExtractor extends KioskExtractor<StreamInfoItem> {
|
||||
private StreamInfoItemsCollector collector = null;
|
||||
private String nextPageUrl = null;
|
||||
@ -44,7 +46,7 @@ public class SoundcloudChartsExtractor extends KioskExtractor<StreamInfoItem> {
|
||||
}
|
||||
|
||||
|
||||
private void computNextPageAndStreams() throws IOException, ExtractionException {
|
||||
private void computeNextPageAndStreams() throws IOException, ExtractionException {
|
||||
collector = new StreamInfoItemsCollector(getServiceId());
|
||||
|
||||
String apiUrl = "https://api-v2.soundcloud.com/charts" +
|
||||
@ -57,11 +59,9 @@ public class SoundcloudChartsExtractor extends KioskExtractor<StreamInfoItem> {
|
||||
apiUrl += "&kind=trending";
|
||||
}
|
||||
|
||||
/*List<String> supportedCountries = Arrays.asList("AU", "CA", "FR", "DE", "IE", "NL", "NZ", "GB", "US");
|
||||
String contentCountry = getContentCountry();
|
||||
if (supportedCountries.contains(contentCountry)) {
|
||||
apiUrl += "®ion=soundcloud:regions:" + contentCountry;
|
||||
}*/
|
||||
|
||||
String contentCountry = SoundCloud.getContentCountry().getCountryCode();
|
||||
apiUrl += "®ion=soundcloud:regions:" + contentCountry;
|
||||
|
||||
nextPageUrl = SoundcloudParsingHelper.getStreamsFromApi(collector, apiUrl, true);
|
||||
}
|
||||
@ -69,7 +69,7 @@ public class SoundcloudChartsExtractor extends KioskExtractor<StreamInfoItem> {
|
||||
@Override
|
||||
public String getNextPageUrl() throws IOException, ExtractionException {
|
||||
if (nextPageUrl == null) {
|
||||
computNextPageAndStreams();
|
||||
computeNextPageAndStreams();
|
||||
}
|
||||
return nextPageUrl;
|
||||
}
|
||||
@ -78,7 +78,7 @@ public class SoundcloudChartsExtractor extends KioskExtractor<StreamInfoItem> {
|
||||
@Override
|
||||
public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
|
||||
if (collector == null) {
|
||||
computNextPageAndStreams();
|
||||
computeNextPageAndStreams();
|
||||
}
|
||||
return new InfoItemsPage<>(collector, getNextPageUrl());
|
||||
}
|
||||
|
@ -31,7 +31,7 @@ import static org.schabi.newpipe.extractor.ServiceList.SoundCloud;
|
||||
import static org.schabi.newpipe.extractor.utils.Utils.replaceHttpWithHttps;
|
||||
|
||||
public class SoundcloudParsingHelper {
|
||||
private static final String HARDCODED_CLIENT_ID = "t0h1jzYMsaZXy6ggnZO71gHK3Ms6CFwE"; // Updated on 14/03/20
|
||||
private static final String HARDCODED_CLIENT_ID = "Uz4aPhG7GAl1VYGOnvOPW1wQ0M6xKtA9"; // Updated on 16/03/20
|
||||
private static String clientId;
|
||||
|
||||
private SoundcloudParsingHelper() {
|
||||
@ -42,7 +42,7 @@ public class SoundcloudParsingHelper {
|
||||
|
||||
Downloader dl = NewPipe.getDownloader();
|
||||
clientId = HARDCODED_CLIENT_ID;
|
||||
if (checkIfHardcodedClientIdIsValid(dl)) {
|
||||
if (checkIfHardcodedClientIdIsValid()) {
|
||||
return clientId;
|
||||
}
|
||||
|
||||
@ -73,11 +73,12 @@ public class SoundcloudParsingHelper {
|
||||
throw new ExtractionException("Couldn't extract client id");
|
||||
}
|
||||
|
||||
static boolean checkIfHardcodedClientIdIsValid(Downloader dl) {
|
||||
final String apiUrl = "https://api.soundcloud.com/connect?client_id=" + HARDCODED_CLIENT_ID;
|
||||
static boolean checkIfHardcodedClientIdIsValid() {
|
||||
try {
|
||||
// Should return 200 to indicate that the client id is valid, a 401 is returned otherwise.
|
||||
return dl.head(apiUrl).responseCode() == 200;
|
||||
SoundcloudStreamExtractor e = (SoundcloudStreamExtractor) SoundCloud
|
||||
.getStreamExtractor("https://soundcloud.com/liluzivert/do-what-i-want-produced-by-maaly-raw-don-cannon");
|
||||
e.fetchPage();
|
||||
return e.getAudioStreams().size() >= 1;
|
||||
} catch (Exception ignored) {
|
||||
// No need to throw an exception here. If something went wrong, the client_id is wrong
|
||||
return false;
|
||||
@ -107,7 +108,7 @@ public class SoundcloudParsingHelper {
|
||||
* See https://developers.soundcloud.com/docs/api/reference#resolve
|
||||
*/
|
||||
public static JsonObject resolveFor(Downloader downloader, String url) throws IOException, ExtractionException {
|
||||
String apiUrl = "https://api.soundcloud.com/resolve"
|
||||
String apiUrl = "https://api-v2.soundcloud.com/resolve"
|
||||
+ "?url=" + URLEncoder.encode(url, "UTF-8")
|
||||
+ "&client_id=" + clientId();
|
||||
|
||||
|
@ -1,8 +1,11 @@
|
||||
package org.schabi.newpipe.extractor.services.soundcloud;
|
||||
|
||||
import com.grack.nanojson.JsonArray;
|
||||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
import com.grack.nanojson.JsonParserException;
|
||||
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
@ -12,16 +15,20 @@ import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.IOException;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
@SuppressWarnings("WeakerAccess")
|
||||
public class SoundcloudPlaylistExtractor extends PlaylistExtractor {
|
||||
private static final int streamsPerRequestedPage = 15;
|
||||
|
||||
private String playlistId;
|
||||
private JsonObject playlist;
|
||||
|
||||
private StreamInfoItemsCollector streamInfoItemsCollector = null;
|
||||
private String nextPageUrl = null;
|
||||
private StreamInfoItemsCollector streamInfoItemsCollector;
|
||||
private String nextPageUrl;
|
||||
|
||||
public SoundcloudPlaylistExtractor(StreamingService service, ListLinkHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
@ -31,7 +38,7 @@ public class SoundcloudPlaylistExtractor extends PlaylistExtractor {
|
||||
public void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException {
|
||||
|
||||
playlistId = getLinkHandler().getId();
|
||||
String apiUrl = "https://api.soundcloud.com/playlists/" + playlistId +
|
||||
String apiUrl = "https://api-v2.soundcloud.com/playlists/" + playlistId +
|
||||
"?client_id=" + SoundcloudParsingHelper.clientId() +
|
||||
"&representation=compact";
|
||||
|
||||
@ -55,6 +62,7 @@ public class SoundcloudPlaylistExtractor extends PlaylistExtractor {
|
||||
return playlist.getString("title");
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public String getThumbnailUrl() {
|
||||
String artworkUrl = playlist.getString("artwork_url");
|
||||
@ -64,21 +72,20 @@ public class SoundcloudPlaylistExtractor extends PlaylistExtractor {
|
||||
// if it also fails, return null
|
||||
try {
|
||||
final InfoItemsPage<StreamInfoItem> infoItems = getInitialPage();
|
||||
if (infoItems.getItems().isEmpty()) return null;
|
||||
|
||||
for (StreamInfoItem item : infoItems.getItems()) {
|
||||
final String thumbnailUrl = item.getThumbnailUrl();
|
||||
if (thumbnailUrl == null || thumbnailUrl.isEmpty()) continue;
|
||||
|
||||
String thumbnailUrlBetterResolution = thumbnailUrl.replace("large.jpg", "crop.jpg");
|
||||
return thumbnailUrlBetterResolution;
|
||||
artworkUrl = item.getThumbnailUrl();
|
||||
if (artworkUrl != null && !artworkUrl.isEmpty()) break;
|
||||
}
|
||||
} catch (Exception ignored) {
|
||||
}
|
||||
|
||||
if (artworkUrl == null) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
String artworkUrlBetterResolution = artworkUrl.replace("large.jpg", "crop.jpg");
|
||||
return artworkUrlBetterResolution;
|
||||
return artworkUrl.replace("large.jpg", "crop.jpg");
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -110,27 +117,42 @@ public class SoundcloudPlaylistExtractor extends PlaylistExtractor {
|
||||
@Override
|
||||
public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
|
||||
if (streamInfoItemsCollector == null) {
|
||||
computeStreamsAndNextPageUrl();
|
||||
computeInitialTracksAndNextPageUrl();
|
||||
}
|
||||
return new InfoItemsPage<>(streamInfoItemsCollector, getNextPageUrl());
|
||||
return new InfoItemsPage<>(streamInfoItemsCollector, nextPageUrl);
|
||||
}
|
||||
|
||||
private void computeStreamsAndNextPageUrl() throws ExtractionException, IOException {
|
||||
private void computeInitialTracksAndNextPageUrl() throws IOException, ExtractionException {
|
||||
streamInfoItemsCollector = new StreamInfoItemsCollector(getServiceId());
|
||||
StringBuilder nextPageUrlBuilder = new StringBuilder("https://api-v2.soundcloud.com/tracks?client_id=");
|
||||
nextPageUrlBuilder.append(SoundcloudParsingHelper.clientId());
|
||||
nextPageUrlBuilder.append("&ids=");
|
||||
|
||||
// Note the "api", NOT "api-v2"
|
||||
String apiUrl = "https://api.soundcloud.com/playlists/" + getId() + "/tracks"
|
||||
+ "?client_id=" + SoundcloudParsingHelper.clientId()
|
||||
+ "&limit=20"
|
||||
+ "&linked_partitioning=1";
|
||||
JsonArray tracks = playlist.getArray("tracks");
|
||||
for (Object o : tracks) {
|
||||
if (o instanceof JsonObject) {
|
||||
JsonObject track = (JsonObject) o;
|
||||
if (track.has("title")) { // i.e. if full info is available
|
||||
streamInfoItemsCollector.commit(new SoundcloudStreamInfoItemExtractor(track));
|
||||
} else {
|
||||
// %09d would be enough, but a leading zero does not cause problems, so pad to 10 digits to be safe
|
||||
nextPageUrlBuilder.append(String.format("%010d,", track.getInt("id")));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
nextPageUrl = SoundcloudParsingHelper.getStreamsFromApiMinItems(15, streamInfoItemsCollector, apiUrl);
|
||||
nextPageUrlBuilder.setLength(nextPageUrlBuilder.length() - 1); // remove trailing ,
|
||||
nextPageUrl = nextPageUrlBuilder.toString();
|
||||
if (nextPageUrl.endsWith("&ids")) {
|
||||
// there are no other videos
|
||||
nextPageUrl = "";
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getNextPageUrl() throws IOException, ExtractionException {
|
||||
if (nextPageUrl == null) {
|
||||
computeStreamsAndNextPageUrl();
|
||||
computeInitialTracksAndNextPageUrl();
|
||||
}
|
||||
return nextPageUrl;
|
||||
}
|
||||
@ -141,9 +163,36 @@ public class SoundcloudPlaylistExtractor extends PlaylistExtractor {
|
||||
throw new ExtractionException(new IllegalArgumentException("Page url is empty or null"));
|
||||
}
|
||||
|
||||
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
|
||||
String nextPageUrl = SoundcloudParsingHelper.getStreamsFromApiMinItems(15, collector, pageUrl);
|
||||
// see computeInitialTracksAndNextPageUrl
|
||||
final int lengthFirstPartOfUrl = ("https://api-v2.soundcloud.com/tracks?client_id="
|
||||
+ SoundcloudParsingHelper.clientId()
|
||||
+ "&ids=").length();
|
||||
final int lengthOfEveryStream = 11;
|
||||
|
||||
return new InfoItemsPage<>(collector, nextPageUrl);
|
||||
String currentPageUrl, nextUrl;
|
||||
int lengthMaxStreams = lengthFirstPartOfUrl + lengthOfEveryStream * streamsPerRequestedPage;
|
||||
if (pageUrl.length() <= lengthMaxStreams) {
|
||||
currentPageUrl = pageUrl; // fetch every remaining video, there are fewer than the max
|
||||
nextUrl = ""; // afterwards the list is complete
|
||||
} else {
|
||||
currentPageUrl = pageUrl.substring(0, lengthMaxStreams);
|
||||
nextUrl = pageUrl.substring(0, lengthFirstPartOfUrl) + pageUrl.substring(lengthMaxStreams);
|
||||
}
|
||||
|
||||
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
|
||||
String response = NewPipe.getDownloader().get(currentPageUrl, getExtractorLocalization()).responseBody();
|
||||
|
||||
try {
|
||||
JsonArray tracks = JsonParser.array().from(response);
|
||||
for (Object track : tracks) {
|
||||
if (track instanceof JsonObject) {
|
||||
collector.commit(new SoundcloudStreamInfoItemExtractor((JsonObject) track));
|
||||
}
|
||||
}
|
||||
} catch (JsonParserException e) {
|
||||
throw new ParsingException("Could not parse json response", e);
|
||||
}
|
||||
|
||||
return new InfoItemsPage<>(collector, nextUrl);
|
||||
}
|
||||
}
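
The new pagination above packs all not-yet-known track ids into one `tracks?client_id=...&ids=` URL (each id zero-padded to 10 digits plus a comma, i.e. 11 characters per track) and then slices that URL into pages of 15 ids. A standalone sketch of the slicing arithmetic, with a made-up client id and fake track ids:

```java
// Standalone illustration of the ids-URL slicing above (fake client id and track ids).
class SoundcloudIdsPagingDemo {
    public static void main(String[] args) {
        final int streamsPerRequestedPage = 15;
        final String prefix = "https://api-v2.soundcloud.com/tracks?client_id=FAKE_CLIENT_ID&ids=";

        // Build a URL carrying 20 zero-padded ids, comma separated,
        // the way computeInitialTracksAndNextPageUrl does.
        StringBuilder url = new StringBuilder(prefix);
        for (int id = 1; id <= 20; id++) {
            url.append(String.format("%010d,", id));
        }
        url.setLength(url.length() - 1); // drop the trailing comma
        String pageUrl = url.toString();

        final int lengthFirstPartOfUrl = prefix.length();
        final int lengthOfEveryStream = 11; // 10 digits + comma
        int lengthMaxStreams = lengthFirstPartOfUrl + lengthOfEveryStream * streamsPerRequestedPage;

        if (pageUrl.length() <= lengthMaxStreams) {
            System.out.println("single page: " + pageUrl);
        } else {
            // The first 15 ids are fetched now, the remaining 5 form the next page URL.
            String currentPageUrl = pageUrl.substring(0, lengthMaxStreams);
            String nextUrl = pageUrl.substring(0, lengthFirstPartOfUrl) + pageUrl.substring(lengthMaxStreams);
            System.out.println("current: " + currentPageUrl);
            System.out.println("next:    " + nextUrl);
        }
    }
}
```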
|
||||
|
@ -78,8 +78,7 @@ public class SoundcloudSearchExtractor extends SearchExtractor {
|
||||
}
|
||||
|
||||
private InfoItemsCollector<InfoItem, InfoItemExtractor> collectItems(JsonArray searchCollection) {
|
||||
final InfoItemsSearchCollector collector = getInfoItemSearchCollector();
|
||||
collector.reset();
|
||||
final InfoItemsSearchCollector collector = new InfoItemsSearchCollector(getServiceId());
|
||||
|
||||
for (Object result : searchCollection) {
|
||||
if (!(result instanceof JsonObject)) continue;
|
||||
|
@ -7,11 +7,14 @@ import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.kiosk.KioskExtractor;
|
||||
import org.schabi.newpipe.extractor.kiosk.KioskList;
|
||||
import org.schabi.newpipe.extractor.linkhandler.*;
|
||||
import org.schabi.newpipe.extractor.localization.ContentCountry;
|
||||
import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
|
||||
import org.schabi.newpipe.extractor.search.SearchExtractor;
|
||||
import org.schabi.newpipe.extractor.stream.StreamExtractor;
|
||||
import org.schabi.newpipe.extractor.subscription.SubscriptionExtractor;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import static java.util.Collections.singletonList;
|
||||
import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.AUDIO;
|
||||
|
||||
@ -46,6 +49,13 @@ public class SoundcloudService extends StreamingService {
|
||||
return SoundcloudPlaylistLinkHandlerFactory.getInstance();
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ContentCountry> getSupportedCountries() {
|
||||
//Country selector here https://soundcloud.com/charts/top?genre=all-music
|
||||
return ContentCountry.listFrom(
|
||||
"AU", "CA", "DE", "FR", "GB", "IE", "NL", "NZ", "US"
|
||||
);
|
||||
}
|
||||
|
||||
@Override
|
||||
public StreamExtractor getStreamExtractor(LinkHandler LinkHandler) {
|
||||
|
@ -4,18 +4,26 @@ import com.grack.nanojson.JsonArray;
|
||||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
import com.grack.nanojson.JsonParserException;
|
||||
|
||||
import org.schabi.newpipe.extractor.MediaFormat;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.exceptions.ContentNotAvailableException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ContentNotSupportedException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||
import org.schabi.newpipe.extractor.stream.*;
|
||||
import org.schabi.newpipe.extractor.stream.AudioStream;
|
||||
import org.schabi.newpipe.extractor.stream.Description;
|
||||
import org.schabi.newpipe.extractor.stream.StreamExtractor;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.stream.StreamType;
|
||||
import org.schabi.newpipe.extractor.stream.SubtitlesStream;
|
||||
import org.schabi.newpipe.extractor.stream.VideoStream;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.IOException;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.URLEncoder;
|
||||
@ -24,6 +32,8 @@ import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
public class SoundcloudStreamExtractor extends StreamExtractor {
|
||||
private JsonObject track;
|
||||
|
||||
@ -55,14 +65,14 @@ public class SoundcloudStreamExtractor extends StreamExtractor {
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public String getTextualUploadDate() {
|
||||
return track.getString("created_at");
|
||||
public String getTextualUploadDate() throws ParsingException {
|
||||
return track.getString("created_at").replace("T"," ").replace("Z", "");
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public DateWrapper getUploadDate() throws ParsingException {
|
||||
return new DateWrapper(SoundcloudParsingHelper.parseDate(getTextualUploadDate()));
|
||||
return new DateWrapper(SoundcloudParsingHelper.parseDate(track.getString("created_at")));
|
||||
}
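
The textual upload date is now the raw `created_at` value with the ISO 8601 separators stripped. A tiny worked example with a made-up timestamp:

```java
// Worked example of the created_at reformatting above (made-up timestamp).
class CreatedAtDemo {
    public static void main(String[] args) {
        String createdAt = "2020-03-16T18:04:00Z";
        System.out.println(createdAt.replace("T", " ").replace("Z", "")); // 2020-03-16 18:04:00
    }
}
```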
|
||||
|
||||
@Nonnull
|
||||
@ -146,24 +156,13 @@ public class SoundcloudStreamExtractor extends StreamExtractor {
|
||||
List<AudioStream> audioStreams = new ArrayList<>();
|
||||
Downloader dl = NewPipe.getDownloader();
|
||||
|
||||
String apiUrl = "https://api-v2.soundcloud.com/tracks/" + urlEncode(getId())
|
||||
+ "?client_id=" + urlEncode(SoundcloudParsingHelper.clientId());
|
||||
|
||||
String response = dl.get(apiUrl, getExtractorLocalization()).responseBody();
|
||||
JsonObject responseObject;
|
||||
try {
|
||||
responseObject = JsonParser.object().from(response);
|
||||
} catch (JsonParserException e) {
|
||||
throw new ParsingException("Could not parse json response", e);
|
||||
}
|
||||
|
||||
// Streams can be streamable and downloadable - or explicitly not.
|
||||
// For playing the track, it is only necessary to have a streamable track.
|
||||
// If this is not the case, this track might not be published yet.
|
||||
if (!responseObject.getBoolean("streamable")) return audioStreams;
|
||||
if (!track.getBoolean("streamable")) return audioStreams;
|
||||
|
||||
try {
|
||||
JsonArray transcodings = responseObject.getObject("media").getArray("transcodings");
|
||||
JsonArray transcodings = track.getObject("media").getArray("transcodings");
|
||||
|
||||
// get information about what stream formats are available
|
||||
for (Object transcoding : transcodings) {
|
||||
@ -198,6 +197,10 @@ public class SoundcloudStreamExtractor extends StreamExtractor {
|
||||
throw new ExtractionException("Could not get SoundCloud's track audio url", e);
|
||||
}
|
||||
|
||||
if (audioStreams.isEmpty()) {
|
||||
throw new ContentNotSupportedException("HLS audio streams are not yet supported");
|
||||
}
|
||||
|
||||
return audioStreams;
|
||||
}
|
||||
|
||||
|
@ -36,7 +36,7 @@ public class SoundcloudSubscriptionExtractor extends SubscriptionExtractor {
|
||||
throw new InvalidSourceException(e);
|
||||
}
|
||||
|
||||
String apiUrl = "https://api.soundcloud.com/users/" + id + "/followings"
|
||||
String apiUrl = "https://api-v2.soundcloud.com/users/" + id + "/followings"
|
||||
+ "?client_id=" + SoundcloudParsingHelper.clientId()
|
||||
+ "&limit=200";
|
||||
ChannelInfoItemsCollector collector = new ChannelInfoItemsCollector(service.getServiceId());
|
||||
|
@ -7,22 +7,45 @@ import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.feed.FeedExtractor;
|
||||
import org.schabi.newpipe.extractor.kiosk.KioskExtractor;
|
||||
import org.schabi.newpipe.extractor.kiosk.KioskList;
|
||||
import org.schabi.newpipe.extractor.linkhandler.*;
|
||||
import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
|
||||
import org.schabi.newpipe.extractor.linkhandler.LinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.localization.ContentCountry;
|
||||
import org.schabi.newpipe.extractor.localization.Localization;
|
||||
import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
|
||||
import org.schabi.newpipe.extractor.search.SearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.youtube.extractors.*;
|
||||
import org.schabi.newpipe.extractor.services.youtube.linkHandler.*;
|
||||
import org.schabi.newpipe.extractor.services.youtube.extractors.YoutubeChannelExtractor;
|
||||
import org.schabi.newpipe.extractor.services.youtube.extractors.YoutubeCommentsExtractor;
|
||||
import org.schabi.newpipe.extractor.services.youtube.extractors.YoutubeFeedExtractor;
|
||||
import org.schabi.newpipe.extractor.services.youtube.extractors.YoutubeMusicSearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.youtube.extractors.YoutubePlaylistExtractor;
|
||||
import org.schabi.newpipe.extractor.services.youtube.extractors.YoutubeSearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.youtube.extractors.YoutubeStreamExtractor;
|
||||
import org.schabi.newpipe.extractor.services.youtube.extractors.YoutubeSubscriptionExtractor;
|
||||
import org.schabi.newpipe.extractor.services.youtube.extractors.YoutubeSuggestionExtractor;
|
||||
import org.schabi.newpipe.extractor.services.youtube.extractors.YoutubeTrendingExtractor;
|
||||
import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeChannelLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeCommentsLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubePlaylistLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeSearchQueryHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeStreamLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeTrendingLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.stream.StreamExtractor;
|
||||
import org.schabi.newpipe.extractor.subscription.SubscriptionExtractor;
|
||||
import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.util.List;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
import static java.util.Arrays.asList;
|
||||
import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.*;
|
||||
import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.AUDIO;
|
||||
import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.COMMENTS;
|
||||
import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.LIVE;
|
||||
import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.VIDEO;
|
||||
|
||||
/*
|
||||
* Created by Christian Schabesberger on 23.08.15.
|
||||
@ -92,7 +115,13 @@ public class YoutubeService extends StreamingService {
|
||||
|
||||
@Override
|
||||
public SearchExtractor getSearchExtractor(SearchQueryHandler query) {
|
||||
return new YoutubeSearchExtractor(this, query);
|
||||
final List<String> contentFilters = query.getContentFilters();
|
||||
|
||||
if (contentFilters.size() > 0 && contentFilters.get(0).startsWith("music_")) {
|
||||
return new YoutubeMusicSearchExtractor(this, query);
|
||||
} else {
|
||||
return new YoutubeSearchExtractor(this, query);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -2,10 +2,10 @@ package org.schabi.newpipe.extractor.services.youtube.extractors;
|
||||
|
||||
import com.grack.nanojson.JsonArray;
|
||||
import com.grack.nanojson.JsonObject;
|
||||
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelExtractor;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.exceptions.ContentNotSupportedException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
@ -16,13 +16,11 @@ import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.utils.Utils;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.IOException;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
import static org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeParsingHelper.fixThumbnailUrl;
|
||||
import static org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeParsingHelper.getJsonResponse;
|
||||
import static org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeParsingHelper.getTextFromObject;
|
||||
import static org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeParsingHelper.*;
|
||||
import static org.schabi.newpipe.extractor.utils.JsonUtils.*;
|
||||
|
||||
/*
|
||||
* Created by Christian Schabesberger on 25.07.16.
|
||||
@ -49,15 +47,64 @@ public class YoutubeChannelExtractor extends ChannelExtractor {
|
||||
private JsonObject initialData;
|
||||
private JsonObject videoTab;
|
||||
|
||||
/**
|
||||
* Some channels have response redirects and the only way to reliably get the id is by saving it.
|
||||
*<p>
|
||||
* "Movies & Shows":
|
||||
* <pre>
|
||||
* UCuJcl0Ju-gPDoksRjK1ya-w ┐
|
||||
* UChBfWrfBXL9wS6tQtgjt_OQ ├ UClgRkhTL3_hImCAmdLfDE4g
|
||||
* UCok7UTQQEP1Rsctxiv3gwSQ ┘
|
||||
* </pre>
|
||||
*/
|
||||
private String redirectedChannelId;
|
||||
|
||||
public YoutubeChannelExtractor(StreamingService service, ListLinkHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException {
|
||||
final String url = super.getUrl() + "/videos?pbj=1&view=0&flow=grid";
|
||||
String url = super.getUrl() + "/videos?pbj=1&view=0&flow=grid";
|
||||
JsonArray ajaxJson = null;
|
||||
|
||||
int level = 0;
|
||||
while (level < 3) {
|
||||
final JsonArray jsonResponse = getJsonResponse(url, getExtractorLocalization());
|
||||
|
||||
final JsonObject endpoint = jsonResponse.getObject(1, EMPTY_OBJECT)
|
||||
.getObject("response", EMPTY_OBJECT).getArray("onResponseReceivedActions", EMPTY_ARRAY)
|
||||
.getObject(0, EMPTY_OBJECT).getObject("navigateAction", EMPTY_OBJECT)
|
||||
.getObject("endpoint", EMPTY_OBJECT);
|
||||
|
||||
final String webPageType = endpoint
|
||||
.getObject("commandMetadata", EMPTY_OBJECT)
|
||||
.getObject("webCommandMetadata", EMPTY_OBJECT)
|
||||
.getString("webPageType", EMPTY_STRING);
|
||||
|
||||
final String browseId = endpoint
|
||||
.getObject("browseEndpoint", EMPTY_OBJECT)
|
||||
.getString("browseId", EMPTY_STRING);
|
||||
|
||||
if (webPageType.equalsIgnoreCase("WEB_PAGE_TYPE_BROWSE") && !browseId.isEmpty()) {
|
||||
|
||||
if (!browseId.startsWith("UC")) {
|
||||
throw new ExtractionException("Redirected id is not pointing to a channel");
|
||||
}
|
||||
|
||||
url = "https://www.youtube.com/channel/" + browseId + "/videos?pbj=1&view=0&flow=grid";
|
||||
redirectedChannelId = browseId;
|
||||
level++;
|
||||
} else {
|
||||
ajaxJson = jsonResponse;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (ajaxJson == null) {
|
||||
throw new ExtractionException("Could not fetch initial JSON data");
|
||||
}
|
||||
|
||||
final JsonArray ajaxJson = getJsonResponse(url, getExtractorLocalization());
|
||||
initialData = ajaxJson.getObject(1).getObject("response");
|
||||
YoutubeParsingHelper.defaultAlertsCheck(initialData);
|
||||
}
|
||||
@ -84,10 +131,17 @@ public class YoutubeChannelExtractor extends ChannelExtractor {
|
||||
@Nonnull
|
||||
@Override
|
||||
public String getId() throws ParsingException {
|
||||
try {
|
||||
return initialData.getObject("header").getObject("c4TabbedHeaderRenderer").getString("channelId");
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not get channel id", e);
|
||||
final String channelId = initialData
|
||||
.getObject("header", EMPTY_OBJECT)
|
||||
.getObject("c4TabbedHeaderRenderer", EMPTY_OBJECT)
|
||||
.getString("channelId", EMPTY_STRING);
|
||||
|
||||
if (!channelId.isEmpty()) {
|
||||
return channelId;
|
||||
} else if (redirectedChannelId != null && !redirectedChannelId.isEmpty()) {
|
||||
return redirectedChannelId;
|
||||
} else {
|
||||
throw new ParsingException("Could not get channel id");
|
||||
}
|
||||
}
|
||||
|
||||
@ -257,7 +311,7 @@ public class YoutubeChannelExtractor extends ChannelExtractor {
|
||||
}
|
||||
|
||||
if (videoTab == null) {
|
||||
throw new ParsingException("Could not find Videos tab");
|
||||
throw new ContentNotSupportedException("This channel has no Videos tab");
|
||||
}
|
||||
|
||||
try {
|
||||
|
@ -70,8 +70,14 @@ public class YoutubeChannelInfoItemExtractor implements ChannelInfoItemExtractor
|
||||
@Override
|
||||
public long getSubscriberCount() throws ParsingException {
|
||||
try {
|
||||
String subscribers = getTextFromObject(channelInfoItem.getObject("subscriberCountText"));
|
||||
return Utils.mixedNumberWordToLong(subscribers);
|
||||
final JsonObject subscriberCountObject = channelInfoItem.getObject("subscriberCountText");
|
||||
|
||||
if (subscriberCountObject == null) {
|
||||
// Subscription count is not available for this channel item.
|
||||
return -1;
|
||||
}
|
||||
|
||||
return Utils.mixedNumberWordToLong(getTextFromObject(subscriberCountObject));
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not get subscriber count", e);
|
||||
}
|
||||
@ -80,7 +86,14 @@ public class YoutubeChannelInfoItemExtractor implements ChannelInfoItemExtractor
|
||||
@Override
|
||||
public long getStreamCount() throws ParsingException {
|
||||
try {
|
||||
return Long.parseLong(Utils.removeNonDigitCharacters(getTextFromObject(channelInfoItem.getObject("videoCountText"))));
|
||||
final JsonObject videoCountObject = channelInfoItem.getObject("videoCountText");
|
||||
|
||||
if (videoCountObject == null) {
|
||||
// Video count is not available, channel probably has no public uploads.
|
||||
return -1;
|
||||
}
|
||||
|
||||
return Long.parseLong(Utils.removeNonDigitCharacters(getTextFromObject(videoCountObject)));
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not get stream count", e);
|
||||
}
|
||||
@ -89,7 +102,14 @@ public class YoutubeChannelInfoItemExtractor implements ChannelInfoItemExtractor
|
||||
@Override
|
||||
public String getDescription() throws ParsingException {
|
||||
try {
|
||||
return getTextFromObject(channelInfoItem.getObject("descriptionSnippet"));
|
||||
final JsonObject descriptionObject = channelInfoItem.getObject("descriptionSnippet");
|
||||
|
||||
if (descriptionObject == null) {
|
||||
// Channel has no description.
|
||||
return null;
|
||||
}
|
||||
|
||||
return getTextFromObject(descriptionObject);
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not get description", e);
|
||||
}
|
||||
|
@ -0,0 +1,494 @@
|
||||
package org.schabi.newpipe.extractor.services.youtube.extractors;
|
||||
|
||||
import com.grack.nanojson.JsonArray;
|
||||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
import com.grack.nanojson.JsonParserException;
|
||||
import com.grack.nanojson.JsonWriter;
|
||||
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ReCaptchaException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||
import org.schabi.newpipe.extractor.localization.TimeAgoParser;
|
||||
import org.schabi.newpipe.extractor.search.InfoItemsSearchCollector;
|
||||
import org.schabi.newpipe.extractor.search.SearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeParsingHelper;
|
||||
import org.schabi.newpipe.extractor.utils.Utils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
import static org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeParsingHelper.fixThumbnailUrl;
|
||||
import static org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeParsingHelper.getValidJsonResponseBody;
|
||||
import static org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeParsingHelper.getTextFromObject;
|
||||
import static org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeParsingHelper.getUrlFromNavigationEndpoint;
|
||||
import static org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeSearchQueryHandlerFactory.MUSIC_ALBUMS;
|
||||
import static org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeSearchQueryHandlerFactory.MUSIC_ARTISTS;
|
||||
import static org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeSearchQueryHandlerFactory.MUSIC_PLAYLISTS;
|
||||
import static org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeSearchQueryHandlerFactory.MUSIC_SONGS;
|
||||
import static org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeSearchQueryHandlerFactory.MUSIC_VIDEOS;
|
||||
|
||||
public class YoutubeMusicSearchExtractor extends SearchExtractor {
|
||||
private JsonObject initialData;
|
||||
|
||||
public YoutubeMusicSearchExtractor(final StreamingService service, final SearchQueryHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFetchPage(@Nonnull final Downloader downloader) throws IOException, ExtractionException {
|
||||
final String[] youtubeMusicKeys = YoutubeParsingHelper.getYoutubeMusicKeys();
|
||||
|
||||
final String url = "https://music.youtube.com/youtubei/v1/search?alt=json&key=" + youtubeMusicKeys[0];
|
||||
|
||||
final String params;
|
||||
|
||||
switch (getLinkHandler().getContentFilters().get(0)) {
|
||||
case MUSIC_SONGS:
|
||||
params = "Eg-KAQwIARAAGAAgACgAMABqChAEEAUQAxAKEAk%3D";
|
||||
break;
|
||||
case MUSIC_VIDEOS:
|
||||
params = "Eg-KAQwIABABGAAgACgAMABqChAEEAUQAxAKEAk%3D";
|
||||
break;
|
||||
case MUSIC_ALBUMS:
|
||||
params = "Eg-KAQwIABAAGAEgACgAMABqChAEEAUQAxAKEAk%3D";
|
||||
break;
|
||||
case MUSIC_PLAYLISTS:
|
||||
params = "Eg-KAQwIABAAGAAgACgBMABqChAEEAUQAxAKEAk%3D";
|
||||
break;
|
||||
case MUSIC_ARTISTS:
|
||||
params = "Eg-KAQwIABAAGAAgASgAMABqChAEEAUQAxAKEAk%3D";
|
||||
break;
|
||||
default:
|
||||
params = null;
|
||||
break;
|
||||
}
|
||||
|
||||
// @formatter:off
|
||||
byte[] json = JsonWriter.string()
|
||||
.object()
|
||||
.object("context")
|
||||
.object("client")
|
||||
.value("clientName", "WEB_REMIX")
|
||||
.value("clientVersion", youtubeMusicKeys[2])
|
||||
.value("hl", "en")
|
||||
.value("gl", getExtractorContentCountry().getCountryCode())
|
||||
.array("experimentIds").end()
|
||||
.value("experimentsToken", "")
|
||||
.value("utcOffsetMinutes", 0)
|
||||
.object("locationInfo").end()
|
||||
.object("musicAppInfo").end()
|
||||
.end()
|
||||
.object("capabilities").end()
|
||||
.object("request")
|
||||
.array("internalExperimentFlags").end()
|
||||
.object("sessionIndex").end()
|
||||
.end()
|
||||
.object("activePlayers").end()
|
||||
.object("user")
|
||||
.value("enableSafetyMode", false)
|
||||
.end()
|
||||
.end()
|
||||
.value("query", getSearchString())
|
||||
.value("params", params)
|
||||
.end().done().getBytes("UTF-8");
|
||||
// @formatter:on
|
||||
|
||||
final Map<String, List<String>> headers = new HashMap<>();
|
||||
headers.put("X-YouTube-Client-Name", Collections.singletonList(youtubeMusicKeys[1]));
|
||||
headers.put("X-YouTube-Client-Version", Collections.singletonList(youtubeMusicKeys[2]));
|
||||
headers.put("Origin", Collections.singletonList("https://music.youtube.com"));
|
||||
headers.put("Referer", Collections.singletonList("music.youtube.com"));
|
||||
headers.put("Content-Type", Collections.singletonList("application/json"));
|
||||
|
||||
final String responseBody = getValidJsonResponseBody(getDownloader().post(url, headers, json));
|
||||
|
||||
try {
|
||||
initialData = JsonParser.object().from(responseBody);
|
||||
} catch (JsonParserException e) {
|
||||
throw new ParsingException("Could not parse JSON", e);
|
||||
}
|
||||
}
|
||||
|
||||
    @Nonnull
    @Override
    public String getUrl() throws ParsingException {
        return super.getUrl();
    }

    @Override
    public String getSearchSuggestion() throws ParsingException {
        final JsonObject itemSectionRenderer = initialData.getObject("contents").getObject("sectionListRenderer")
                .getArray("contents").getObject(0).getObject("itemSectionRenderer");
        if (itemSectionRenderer == null) {
            return "";
        }
        final JsonObject didYouMeanRenderer = itemSectionRenderer.getArray("contents")
                .getObject(0).getObject("didYouMeanRenderer");
        if (didYouMeanRenderer == null) {
            return "";
        }
        return getTextFromObject(didYouMeanRenderer.getObject("correctedQuery"));
    }

    @Nonnull
    @Override
    public InfoItemsPage<InfoItem> getInitialPage() throws ExtractionException, IOException {
        final InfoItemsSearchCollector collector = new InfoItemsSearchCollector(getServiceId());

        final JsonArray contents = initialData.getObject("contents").getObject("sectionListRenderer").getArray("contents");

        for (Object content : contents) {
            if (((JsonObject) content).getObject("musicShelfRenderer") != null) {
                collectMusicStreamsFrom(collector, ((JsonObject) content).getObject("musicShelfRenderer").getArray("contents"));
            }
        }

        return new InfoItemsPage<>(collector, getNextPageUrl());
    }

    @Override
    public String getNextPageUrl() throws ExtractionException, IOException {
        final JsonArray contents = initialData.getObject("contents").getObject("sectionListRenderer").getArray("contents");

        for (Object content : contents) {
            if (((JsonObject) content).getObject("musicShelfRenderer") != null) {
                return getNextPageUrlFrom(((JsonObject) content).getObject("musicShelfRenderer").getArray("continuations"));
            }
        }

        return "";
    }

@Override
|
||||
public InfoItemsPage<InfoItem> getPage(final String pageUrl) throws IOException, ExtractionException {
|
||||
if (pageUrl == null || pageUrl.isEmpty()) {
|
||||
throw new ExtractionException(new IllegalArgumentException("Page url is empty or null"));
|
||||
}
|
||||
|
||||
final InfoItemsSearchCollector collector = new InfoItemsSearchCollector(getServiceId());
|
||||
|
||||
final String[] youtubeMusicKeys = YoutubeParsingHelper.getYoutubeMusicKeys();
|
||||
|
||||
// @formatter:off
|
||||
byte[] json = JsonWriter.string()
|
||||
.object()
|
||||
.object("context")
|
||||
.object("client")
|
||||
.value("clientName", "WEB_REMIX")
|
||||
.value("clientVersion", youtubeMusicKeys[2])
|
||||
.value("hl", "en")
|
||||
.value("gl", getExtractorContentCountry().getCountryCode())
|
||||
.array("experimentIds").end()
|
||||
.value("experimentsToken", "")
|
||||
.value("utcOffsetMinutes", 0)
|
||||
.object("locationInfo").end()
|
||||
.object("musicAppInfo").end()
|
||||
.end()
|
||||
.object("capabilities").end()
|
||||
.object("request")
|
||||
.array("internalExperimentFlags").end()
|
||||
.object("sessionIndex").end()
|
||||
.end()
|
||||
.object("activePlayers").end()
|
||||
.object("user")
|
||||
.value("enableSafetyMode", false)
|
||||
.end()
|
||||
.end()
|
||||
.end().done().getBytes("UTF-8");
|
||||
// @formatter:on
|
||||
|
||||
final Map<String, List<String>> headers = new HashMap<>();
|
||||
headers.put("X-YouTube-Client-Name", Collections.singletonList(youtubeMusicKeys[1]));
|
||||
headers.put("X-YouTube-Client-Version", Collections.singletonList(youtubeMusicKeys[2]));
|
||||
headers.put("Origin", Collections.singletonList("https://music.youtube.com"));
|
||||
headers.put("Referer", Collections.singletonList("music.youtube.com"));
|
||||
headers.put("Content-Type", Collections.singletonList("application/json"));
|
||||
|
||||
final String responseBody = getValidJsonResponseBody(getDownloader().post(pageUrl, headers, json));
|
||||
|
||||
final JsonObject ajaxJson;
|
||||
try {
|
||||
ajaxJson = JsonParser.object().from(responseBody);
|
||||
} catch (JsonParserException e) {
|
||||
throw new ParsingException("Could not parse JSON", e);
|
||||
}
|
||||
|
||||
if (ajaxJson.getObject("continuationContents") == null) {
|
||||
return InfoItemsPage.emptyPage();
|
||||
}
|
||||
|
||||
final JsonObject musicShelfContinuation = ajaxJson.getObject("continuationContents").getObject("musicShelfContinuation");
|
||||
|
||||
collectMusicStreamsFrom(collector, musicShelfContinuation.getArray("contents"));
|
||||
final JsonArray continuations = musicShelfContinuation.getArray("continuations");
|
||||
|
||||
return new InfoItemsPage<>(collector, getNextPageUrlFrom(continuations));
|
||||
}
|
||||
|
||||
private void collectMusicStreamsFrom(final InfoItemsSearchCollector collector, final JsonArray videos) {
|
||||
final TimeAgoParser timeAgoParser = getTimeAgoParser();
|
||||
|
||||
for (Object item : videos) {
|
||||
final JsonObject info = ((JsonObject) item).getObject("musicResponsiveListItemRenderer");
|
||||
if (info != null) {
|
||||
final String searchType = getLinkHandler().getContentFilters().get(0);
|
||||
if (searchType.equals(MUSIC_SONGS) || searchType.equals(MUSIC_VIDEOS)) {
|
||||
collector.commit(new YoutubeStreamInfoItemExtractor(info, timeAgoParser) {
|
||||
@Override
|
||||
public String getUrl() throws ParsingException {
|
||||
final String url = getUrlFromNavigationEndpoint(info.getObject("doubleTapCommand"));
|
||||
if (url != null && !url.isEmpty()) {
|
||||
return url;
|
||||
}
|
||||
throw new ParsingException("Could not get url");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() throws ParsingException {
|
||||
final String name = getTextFromObject(info.getArray("flexColumns").getObject(0)
|
||||
.getObject("musicResponsiveListItemFlexColumnRenderer").getObject("text"));
|
||||
if (name != null && !name.isEmpty()) {
|
||||
return name;
|
||||
}
|
||||
throw new ParsingException("Could not get name");
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getDuration() throws ParsingException {
|
||||
final String duration = getTextFromObject(info.getArray("flexColumns").getObject(3)
|
||||
.getObject("musicResponsiveListItemFlexColumnRenderer").getObject("text"));
|
||||
if (duration != null && !duration.isEmpty()) {
|
||||
return YoutubeParsingHelper.parseDurationString(duration);
|
||||
}
|
||||
throw new ParsingException("Could not get duration");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getUploaderName() throws ParsingException {
|
||||
final String name = getTextFromObject(info.getArray("flexColumns").getObject(1)
|
||||
.getObject("musicResponsiveListItemFlexColumnRenderer").getObject("text"));
|
||||
if (name != null && !name.isEmpty()) {
|
||||
return name;
|
||||
}
|
||||
throw new ParsingException("Could not get uploader name");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getUploaderUrl() throws ParsingException {
|
||||
if (searchType.equals(MUSIC_VIDEOS)) {
|
||||
JsonArray items = info.getObject("menu").getObject("menuRenderer").getArray("items");
|
||||
for (Object item : items) {
|
||||
final JsonObject menuNavigationItemRenderer = ((JsonObject) item).getObject("menuNavigationItemRenderer");
|
||||
if (menuNavigationItemRenderer != null && menuNavigationItemRenderer.getObject("icon").getString("iconType").equals("ARTIST")) {
|
||||
return getUrlFromNavigationEndpoint(menuNavigationItemRenderer.getObject("navigationEndpoint"));
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
} else {
|
||||
final JsonObject navigationEndpoint = info.getArray("flexColumns")
|
||||
.getObject(1).getObject("musicResponsiveListItemFlexColumnRenderer")
|
||||
.getObject("text").getArray("runs").getObject(0).getObject("navigationEndpoint");
|
||||
|
||||
if (navigationEndpoint == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
final String url = getUrlFromNavigationEndpoint(navigationEndpoint);
|
||||
|
||||
if (url != null && !url.isEmpty()) {
|
||||
return url;
|
||||
}
|
||||
throw new ParsingException("Could not get uploader url");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTextualUploadDate() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DateWrapper getUploadDate() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getViewCount() throws ParsingException {
|
||||
if (searchType.equals(MUSIC_SONGS)) {
|
||||
return -1;
|
||||
}
|
||||
final String viewCount = getTextFromObject(info.getArray("flexColumns").getObject(2)
|
||||
.getObject("musicResponsiveListItemFlexColumnRenderer").getObject("text"));
|
||||
if (viewCount != null && !viewCount.isEmpty()) {
|
||||
return Utils.mixedNumberWordToLong(viewCount);
|
||||
}
|
||||
throw new ParsingException("Could not get view count");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getThumbnailUrl() throws ParsingException {
|
||||
try {
|
||||
final JsonArray thumbnails = info.getObject("thumbnail").getObject("musicThumbnailRenderer")
|
||||
.getObject("thumbnail").getArray("thumbnails");
|
||||
// the last thumbnail is the one with the highest resolution
|
||||
final String url = thumbnails.getObject(thumbnails.size() - 1).getString("url");
|
||||
|
||||
return fixThumbnailUrl(url);
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not get thumbnail url", e);
|
||||
}
|
||||
}
|
||||
});
|
||||
} else if (searchType.equals(MUSIC_ARTISTS)) {
|
||||
collector.commit(new YoutubeChannelInfoItemExtractor(info) {
|
||||
@Override
|
||||
public String getThumbnailUrl() throws ParsingException {
|
||||
try {
|
||||
final JsonArray thumbnails = info.getObject("thumbnail").getObject("musicThumbnailRenderer")
|
||||
.getObject("thumbnail").getArray("thumbnails");
|
||||
// the last thumbnail is the one with the highest resolution
|
||||
final String url = thumbnails.getObject(thumbnails.size() - 1).getString("url");
|
||||
|
||||
return fixThumbnailUrl(url);
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not get thumbnail url", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() throws ParsingException {
|
||||
final String name = getTextFromObject(info.getArray("flexColumns").getObject(0)
|
||||
.getObject("musicResponsiveListItemFlexColumnRenderer").getObject("text"));
|
||||
if (name != null && !name.isEmpty()) {
|
||||
return name;
|
||||
}
|
||||
throw new ParsingException("Could not get name");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getUrl() throws ParsingException {
|
||||
final String url = getUrlFromNavigationEndpoint(info.getObject("navigationEndpoint"));
|
||||
if (url != null && !url.isEmpty()) {
|
||||
return url;
|
||||
}
|
||||
throw new ParsingException("Could not get url");
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getSubscriberCount() throws ParsingException {
|
||||
final String viewCount = getTextFromObject(info.getArray("flexColumns").getObject(2)
|
||||
.getObject("musicResponsiveListItemFlexColumnRenderer").getObject("text"));
|
||||
if (viewCount != null && !viewCount.isEmpty()) {
|
||||
return Utils.mixedNumberWordToLong(viewCount);
|
||||
}
|
||||
throw new ParsingException("Could not get subscriber count");
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getStreamCount() {
|
||||
return -1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return null;
|
||||
}
|
||||
});
|
||||
} else if (searchType.equals(MUSIC_ALBUMS) || searchType.equals(MUSIC_PLAYLISTS)) {
|
||||
collector.commit(new YoutubePlaylistInfoItemExtractor(info) {
|
||||
@Override
|
||||
public String getThumbnailUrl() throws ParsingException {
|
||||
try {
|
||||
final JsonArray thumbnails = info.getObject("thumbnail").getObject("musicThumbnailRenderer")
|
||||
.getObject("thumbnail").getArray("thumbnails");
|
||||
// the last thumbnail is the one with the highest resolution
|
||||
final String url = thumbnails.getObject(thumbnails.size() - 1).getString("url");
|
||||
|
||||
return fixThumbnailUrl(url);
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not get thumbnail url", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() throws ParsingException {
|
||||
final String name = getTextFromObject(info.getArray("flexColumns").getObject(0)
|
||||
.getObject("musicResponsiveListItemFlexColumnRenderer").getObject("text"));
|
||||
if (name != null && !name.isEmpty()) {
|
||||
return name;
|
||||
}
|
||||
throw new ParsingException("Could not get name");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getUrl() throws ParsingException {
|
||||
final String url = getUrlFromNavigationEndpoint(info.getObject("doubleTapCommand"));
|
||||
if (url != null && !url.isEmpty()) {
|
||||
return url;
|
||||
}
|
||||
throw new ParsingException("Could not get url");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getUploaderName() throws ParsingException {
|
||||
final String name;
|
||||
if (searchType.equals(MUSIC_ALBUMS)) {
|
||||
name = getTextFromObject(info.getArray("flexColumns").getObject(2)
|
||||
.getObject("musicResponsiveListItemFlexColumnRenderer").getObject("text"));
|
||||
} else {
|
||||
name = getTextFromObject(info.getArray("flexColumns").getObject(1)
|
||||
.getObject("musicResponsiveListItemFlexColumnRenderer").getObject("text"));
|
||||
}
|
||||
if (name != null && !name.isEmpty()) {
|
||||
return name;
|
||||
}
|
||||
throw new ParsingException("Could not get uploader name");
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getStreamCount() throws ParsingException {
|
||||
if (searchType.equals(MUSIC_ALBUMS)) {
|
||||
return ITEM_COUNT_UNKNOWN;
|
||||
}
|
||||
final String count = getTextFromObject(info.getArray("flexColumns").getObject(2)
|
||||
.getObject("musicResponsiveListItemFlexColumnRenderer").getObject("text"));
|
||||
if (count != null && !count.isEmpty()) {
|
||||
if (count.contains("100+")) {
|
||||
return ITEM_COUNT_MORE_THAN_100;
|
||||
} else {
|
||||
return Long.parseLong(Utils.removeNonDigitCharacters(count));
|
||||
}
|
||||
}
|
||||
throw new ParsingException("Could not get count");
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
    private String getNextPageUrlFrom(final JsonArray continuations) throws ParsingException, IOException, ReCaptchaException {
        if (continuations == null) {
            return "";
        }

        final JsonObject nextContinuationData = continuations.getObject(0).getObject("nextContinuationData");
        final String continuation = nextContinuationData.getString("continuation");
        final String clickTrackingParams = nextContinuationData.getString("clickTrackingParams");

        return "https://music.youtube.com/youtubei/v1/search?ctoken=" + continuation + "&continuation=" + continuation
                + "&itct=" + clickTrackingParams + "&alt=json&key=" + YoutubeParsingHelper.getYoutubeMusicKeys()[0];
    }
}
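A minimal usage sketch for the class above, assuming `NewPipe.init(...)` has already been called with a working `Downloader` and that the YouTube service routes the `music_*` content filters to this extractor; the query string is just an example:

```java
import static java.util.Collections.singletonList;
import static org.schabi.newpipe.extractor.ServiceList.YouTube;
import static org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeSearchQueryHandlerFactory.MUSIC_SONGS;

// Obtain a search extractor for YouTube Music songs and fetch the first page of results.
final SearchExtractor extractor = YouTube.getSearchExtractor("test", singletonList(MUSIC_SONGS), "");
extractor.fetchPage();

// Each result is an InfoItem; name, URL and type are the fields the search tests below assert on.
for (final InfoItem item : extractor.getInitialPage().getItems()) {
    System.out.println(item.getInfoType() + " | " + item.getName() + " | " + item.getUrl());
}
```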
@@ -43,12 +43,12 @@ import static org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeP
|
||||
public class YoutubeSearchExtractor extends SearchExtractor {
|
||||
private JsonObject initialData;
|
||||
|
||||
public YoutubeSearchExtractor(StreamingService service, SearchQueryHandler linkHandler) {
|
||||
public YoutubeSearchExtractor(final StreamingService service, final SearchQueryHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException {
|
||||
public void onFetchPage(@Nonnull final Downloader downloader) throws IOException, ExtractionException {
|
||||
final String url = getUrl() + "&pbj=1";
|
||||
|
||||
final JsonArray ajaxJson = getJsonResponse(url, getExtractorLocalization());
|
||||
@ -64,23 +64,23 @@ public class YoutubeSearchExtractor extends SearchExtractor {
|
||||
|
||||
@Override
|
||||
public String getSearchSuggestion() throws ParsingException {
|
||||
JsonObject showingResultsForRenderer = initialData.getObject("contents")
|
||||
final JsonObject showingResultsForRenderer = initialData.getObject("contents")
|
||||
.getObject("twoColumnSearchResultsRenderer").getObject("primaryContents")
|
||||
.getObject("sectionListRenderer").getArray("contents").getObject(0)
|
||||
.getObject("itemSectionRenderer").getArray("contents").getObject(0)
|
||||
.getObject("showingResultsForRenderer");
|
||||
if (showingResultsForRenderer == null) {
|
||||
return "";
|
||||
} else {
|
||||
return getTextFromObject(showingResultsForRenderer.getObject("correctedQuery"));
|
||||
}
|
||||
return getTextFromObject(showingResultsForRenderer.getObject("correctedQuery"));
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public InfoItemsPage<InfoItem> getInitialPage() throws ExtractionException {
|
||||
InfoItemsSearchCollector collector = getInfoItemSearchCollector();
|
||||
JsonArray sections = initialData.getObject("contents").getObject("twoColumnSearchResultsRenderer")
|
||||
final InfoItemsSearchCollector collector = new InfoItemsSearchCollector(getServiceId());
|
||||
|
||||
final JsonArray sections = initialData.getObject("contents").getObject("twoColumnSearchResultsRenderer")
|
||||
.getObject("primaryContents").getObject("sectionListRenderer").getArray("contents");
|
||||
|
||||
for (Object section : sections) {
|
||||
@ -98,25 +98,24 @@ public class YoutubeSearchExtractor extends SearchExtractor {
|
||||
}
|
||||
|
||||
@Override
|
||||
public InfoItemsPage<InfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
|
||||
public InfoItemsPage<InfoItem> getPage(final String pageUrl) throws IOException, ExtractionException {
|
||||
if (pageUrl == null || pageUrl.isEmpty()) {
|
||||
throw new ExtractionException(new IllegalArgumentException("Page url is empty or null"));
|
||||
}
|
||||
|
||||
InfoItemsSearchCollector collector = getInfoItemSearchCollector();
|
||||
final InfoItemsSearchCollector collector = new InfoItemsSearchCollector(getServiceId());
|
||||
final JsonArray ajaxJson = getJsonResponse(pageUrl, getExtractorLocalization());
|
||||
|
||||
JsonObject itemSectionRenderer = ajaxJson.getObject(1).getObject("response")
|
||||
final JsonObject itemSectionRenderer = ajaxJson.getObject(1).getObject("response")
|
||||
.getObject("continuationContents").getObject("itemSectionContinuation");
|
||||
|
||||
collectStreamsFrom(collector, itemSectionRenderer.getArray("contents"));
|
||||
final JsonArray continuations = itemSectionRenderer.getArray("continuations");
|
||||
|
||||
return new InfoItemsPage<>(collector, getNextPageUrlFrom(itemSectionRenderer.getArray("continuations")));
|
||||
return new InfoItemsPage<>(collector, getNextPageUrlFrom(continuations));
|
||||
}
|
||||
|
||||
private void collectStreamsFrom(InfoItemsSearchCollector collector, JsonArray videos) throws NothingFoundException, ParsingException {
|
||||
collector.reset();
|
||||
|
||||
private void collectStreamsFrom(final InfoItemsSearchCollector collector, final JsonArray videos) throws NothingFoundException, ParsingException {
|
||||
final TimeAgoParser timeAgoParser = getTimeAgoParser();
|
||||
|
||||
for (Object item : videos) {
|
||||
@ -133,14 +132,15 @@ public class YoutubeSearchExtractor extends SearchExtractor {
|
||||
}
|
||||
}
|
||||
|
||||
private String getNextPageUrlFrom(JsonArray continuations) throws ParsingException {
|
||||
private String getNextPageUrlFrom(final JsonArray continuations) throws ParsingException {
|
||||
if (continuations == null) {
|
||||
return "";
|
||||
}
|
||||
|
||||
JsonObject nextContinuationData = continuations.getObject(0).getObject("nextContinuationData");
|
||||
String continuation = nextContinuationData.getString("continuation");
|
||||
String clickTrackingParams = nextContinuationData.getString("clickTrackingParams");
|
||||
final JsonObject nextContinuationData = continuations.getObject(0).getObject("nextContinuationData");
|
||||
final String continuation = nextContinuationData.getString("continuation");
|
||||
final String clickTrackingParams = nextContinuationData.getString("clickTrackingParams");
|
||||
|
||||
return getUrl() + "&pbj=1&ctoken=" + continuation + "&continuation=" + continuation
|
||||
+ "&itct=" + clickTrackingParams;
|
||||
}
|
||||
|
@ -279,6 +279,8 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
||||
if (views == null) throw new ParsingException("Could not get view count");
|
||||
}
|
||||
|
||||
if (views.toLowerCase().contains("no views")) return 0;
|
||||
|
||||
return Long.parseLong(Utils.removeNonDigitCharacters(views));
|
||||
}
|
||||
|
||||
@ -629,7 +631,7 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
||||
|
||||
playerResponse = getPlayerResponse();
|
||||
|
||||
final JsonObject playabilityStatus = playerResponse.getObject("playabilityStatus", JsonUtils.DEFAULT_EMPTY);
|
||||
final JsonObject playabilityStatus = playerResponse.getObject("playabilityStatus", JsonUtils.EMPTY_OBJECT);
|
||||
final String status = playabilityStatus.getString("status");
|
||||
// If status exist, and is not "OK", throw a ContentNotAvailableException with the reason.
|
||||
if (status != null && !status.toLowerCase().equals("ok")) {
|
||||
|
@ -5,6 +5,8 @@ import com.grack.nanojson.JsonArray;
|
||||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
import com.grack.nanojson.JsonParserException;
|
||||
import com.grack.nanojson.JsonWriter;
|
||||
|
||||
import org.jsoup.Jsoup;
|
||||
import org.jsoup.nodes.Document;
|
||||
import org.schabi.newpipe.extractor.downloader.Response;
|
||||
@ -18,6 +20,7 @@ import org.schabi.newpipe.extractor.utils.Utils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.net.URLDecoder;
|
||||
import java.text.ParseException;
|
||||
@ -62,6 +65,9 @@ public class YoutubeParsingHelper {
|
||||
private static final String HARDCODED_CLIENT_VERSION = "2.20200214.04.00";
|
||||
private static String clientVersion;
|
||||
|
||||
private static final String[] HARDCODED_YOUTUBE_MUSIC_KEYS = {"AIzaSyC9XL3ZjWddXya6X74dJoCTL-WEYFDNX30", "67", "0.1"};
|
||||
private static String[] youtubeMusicKeys;
|
||||
|
||||
private static final String FEED_BASE_CHANNEL_ID = "https://www.youtube.com/feeds/videos.xml?channel_id=";
|
||||
private static final String FEED_BASE_USER = "https://www.youtube.com/feeds/videos.xml?user=";
|
||||
|
||||
@ -196,11 +202,7 @@ public class YoutubeParsingHelper {
|
||||
*/
|
||||
public static String getClientVersion() throws IOException, ExtractionException {
|
||||
if (clientVersion != null && !clientVersion.isEmpty()) return clientVersion;
|
||||
|
||||
if (isHardcodedClientVersionValid()) {
|
||||
clientVersion = HARDCODED_CLIENT_VERSION;
|
||||
return clientVersion;
|
||||
}
|
||||
if (isHardcodedClientVersionValid()) return clientVersion = HARDCODED_CLIENT_VERSION;
|
||||
|
||||
final String url = "https://www.youtube.com/results?search_query=test";
|
||||
final String html = getDownloader().get(url).responseBody();
|
||||
@ -217,8 +219,7 @@ public class YoutubeParsingHelper {
|
||||
JsonObject p = (JsonObject) param;
|
||||
String key = p.getString("key");
|
||||
if (key != null && key.equals("cver")) {
|
||||
clientVersion = p.getString("value");
|
||||
return clientVersion;
|
||||
return clientVersion = p.getString("value");
|
||||
}
|
||||
}
|
||||
} else if (s.getString("service").equals("ECATCHER")) {
|
||||
@ -244,21 +245,94 @@ public class YoutubeParsingHelper {
|
||||
try {
|
||||
contextClientVersion = Parser.matchGroup1(pattern, html);
|
||||
if (contextClientVersion != null && !contextClientVersion.isEmpty()) {
|
||||
clientVersion = contextClientVersion;
|
||||
return clientVersion;
|
||||
return clientVersion = contextClientVersion;
|
||||
}
|
||||
} catch (Exception ignored) {
|
||||
}
|
||||
}
|
||||
|
||||
if (shortClientVersion != null) {
|
||||
clientVersion = shortClientVersion;
|
||||
return clientVersion;
|
||||
return clientVersion = shortClientVersion;
|
||||
}
|
||||
|
||||
throw new ParsingException("Could not get client version");
|
||||
}
|
||||
|
||||
public static boolean areHardcodedYoutubeMusicKeysValid() throws IOException, ReCaptchaException {
|
||||
final String url = "https://music.youtube.com/youtubei/v1/search?alt=json&key=" + HARDCODED_YOUTUBE_MUSIC_KEYS[0];
|
||||
|
||||
// @formatter:off
|
||||
byte[] json = JsonWriter.string()
|
||||
.object()
|
||||
.object("context")
|
||||
.object("client")
|
||||
.value("clientName", "WEB_REMIX")
|
||||
.value("clientVersion", HARDCODED_YOUTUBE_MUSIC_KEYS[2])
|
||||
.value("hl", "en")
|
||||
.value("gl", "GB")
|
||||
.array("experimentIds").end()
|
||||
.value("experimentsToken", "")
|
||||
.value("utcOffsetMinutes", 0)
|
||||
.object("locationInfo").end()
|
||||
.object("musicAppInfo").end()
|
||||
.end()
|
||||
.object("capabilities").end()
|
||||
.object("request")
|
||||
.array("internalExperimentFlags").end()
|
||||
.object("sessionIndex").end()
|
||||
.end()
|
||||
.object("activePlayers").end()
|
||||
.object("user")
|
||||
.value("enableSafetyMode", false)
|
||||
.end()
|
||||
.end()
|
||||
.value("query", "test")
|
||||
.value("params", "Eg-KAQwIARAAGAAgACgAMABqChAEEAUQAxAKEAk%3D")
|
||||
.end().done().getBytes("UTF-8");
|
||||
// @formatter:on
|
||||
|
||||
Map<String, List<String>> headers = new HashMap<>();
|
||||
headers.put("X-YouTube-Client-Name", Collections.singletonList(HARDCODED_YOUTUBE_MUSIC_KEYS[1]));
|
||||
headers.put("X-YouTube-Client-Version", Collections.singletonList(HARDCODED_YOUTUBE_MUSIC_KEYS[2]));
|
||||
headers.put("Origin", Collections.singletonList("https://music.youtube.com"));
|
||||
headers.put("Referer", Collections.singletonList("music.youtube.com"));
|
||||
headers.put("Content-Type", Collections.singletonList("application/json"));
|
||||
|
||||
String response = getDownloader().post(url, headers, json).responseBody();
|
||||
|
||||
return response.length() > 50; // ensure to have a valid response
|
||||
}
|
||||
|
||||
public static String[] getYoutubeMusicKeys() throws IOException, ReCaptchaException, Parser.RegexException {
|
||||
if (youtubeMusicKeys != null && youtubeMusicKeys.length == 3) return youtubeMusicKeys;
|
||||
if (areHardcodedYoutubeMusicKeysValid()) return youtubeMusicKeys = HARDCODED_YOUTUBE_MUSIC_KEYS;
|
||||
|
||||
final String url = "https://music.youtube.com/";
|
||||
final String html = getDownloader().get(url).responseBody();
|
||||
|
||||
String key;
|
||||
try {
|
||||
key = Parser.matchGroup1("INNERTUBE_API_KEY\":\"([0-9a-zA-Z_-]+?)\"", html);
|
||||
} catch (Parser.RegexException e) {
|
||||
key = Parser.matchGroup1("innertube_api_key\":\"([0-9a-zA-Z_-]+?)\"", html);
|
||||
}
|
||||
|
||||
final String clientName = Parser.matchGroup1("INNERTUBE_CONTEXT_CLIENT_NAME\":([0-9]+?),", html);
|
||||
|
||||
String clientVersion;
|
||||
try {
|
||||
clientVersion = Parser.matchGroup1("INNERTUBE_CONTEXT_CLIENT_VERSION\":\"([0-9\\.]+?)\"", html);
|
||||
} catch (Parser.RegexException e) {
|
||||
try {
|
||||
clientVersion = Parser.matchGroup1("INNERTUBE_CLIENT_VERSION\":\"([0-9\\.]+?)\"", html);
|
||||
} catch (Parser.RegexException ee) {
|
||||
clientVersion = Parser.matchGroup1("innertube_context_client_version\":\"([0-9\\.]+?)\"", html);
|
||||
}
|
||||
}
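// The returned array is read positionally by callers such as YoutubeMusicSearchExtractor:
// [0] = Innertube API key, [1] = X-YouTube-Client-Name header value, [2] = client version.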
return youtubeMusicKeys = new String[]{key, clientName, clientVersion};
|
||||
}
|
||||
|
||||
public static String getUrlFromNavigationEndpoint(JsonObject navigationEndpoint) throws ParsingException {
|
||||
if (navigationEndpoint.getObject("urlEndpoint") != null) {
|
||||
String internUrl = navigationEndpoint.getObject("urlEndpoint").getString("url");
|
||||
@ -303,6 +377,9 @@ public class YoutubeParsingHelper {
|
||||
if (navigationEndpoint.getObject("watchEndpoint").has("startTimeSeconds"))
|
||||
url.append("&t=").append(navigationEndpoint.getObject("watchEndpoint").getInt("startTimeSeconds"));
|
||||
return url.toString();
|
||||
} else if (navigationEndpoint.getObject("watchPlaylistEndpoint") != null) {
|
||||
return "https://www.youtube.com/playlist?list=" +
|
||||
navigationEndpoint.getObject("watchPlaylistEndpoint").getString("playlistId");
|
||||
}
|
||||
return null;
|
||||
}
|
||||
@ -351,12 +428,8 @@ public class YoutubeParsingHelper {
|
||||
return thumbnailUrl;
|
||||
}
|
||||
|
||||
public static JsonArray getJsonResponse(String url, Localization localization) throws IOException, ExtractionException {
|
||||
Map<String, List<String>> headers = new HashMap<>();
|
||||
headers.put("X-YouTube-Client-Name", Collections.singletonList("1"));
|
||||
headers.put("X-YouTube-Client-Version", Collections.singletonList(getClientVersion()));
|
||||
final Response response = getDownloader().get(url, headers, localization);
|
||||
|
||||
public static String getValidJsonResponseBody(final Response response)
|
||||
throws ParsingException, MalformedURLException {
|
||||
if (response.responseCode() == 404) {
|
||||
throw new ContentNotAvailableException("Not found" +
|
||||
" (\"" + response.responseCode() + " " + response.responseMessage() + "\")");
|
||||
@ -377,11 +450,24 @@ public class YoutubeParsingHelper {
|
||||
}
|
||||
|
||||
final String responseContentType = response.getHeader("Content-Type");
|
||||
if (responseContentType != null && responseContentType.toLowerCase().contains("text/html")) {
|
||||
if (responseContentType != null
|
||||
&& responseContentType.toLowerCase().contains("text/html")) {
|
||||
throw new ParsingException("Got HTML document, expected JSON response" +
|
||||
" (latest url was: \"" + response.latestUrl() + "\")");
|
||||
}
|
||||
|
||||
return responseBody;
|
||||
}
|
||||
|
||||
public static JsonArray getJsonResponse(final String url, final Localization localization)
|
||||
throws IOException, ExtractionException {
|
||||
Map<String, List<String>> headers = new HashMap<>();
|
||||
headers.put("X-YouTube-Client-Name", Collections.singletonList("1"));
|
||||
headers.put("X-YouTube-Client-Version", Collections.singletonList(getClientVersion()));
|
||||
final Response response = getDownloader().get(url, headers, localization);
|
||||
|
||||
final String responseBody = getValidJsonResponseBody(response);
|
||||
|
||||
try {
|
||||
return JsonParser.array().from(responseBody);
|
||||
} catch (JsonParserException e) {
|
||||
@ -393,6 +479,7 @@ public class YoutubeParsingHelper {
|
||||
* Shared alert detection function, multiple endpoints return the error similarly structured.
|
||||
* <p>
|
||||
* Will check if the object has an alert of the type "ERROR".
|
||||
* </p>
|
||||
*
|
||||
* @param initialData the object which will be checked if an alert is present
|
||||
* @throws ContentNotAvailableException if an alert is detected
|
||||
|
@ -1,5 +1,6 @@
|
||||
package org.schabi.newpipe.extractor.services.youtube.linkHandler;
|
||||
|
||||
import org.schabi.newpipe.extractor.exceptions.ContentNotSupportedException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.utils.Utils;
|
||||
@ -45,6 +46,11 @@ public class YoutubePlaylistLinkHandlerFactory extends ListLinkHandlerFactory {
|
||||
throw new ParsingException("the list-ID given in the URL does not match the list pattern");
|
||||
}
|
||||
|
||||
// Don't accept auto-generated "Mix" playlists, but do accept auto-generated YouTube Music playlists
|
||||
if (listID.startsWith("RD") && !listID.startsWith("RDCLAK")) {
|
||||
throw new ContentNotSupportedException("YouTube Mix playlists are not yet supported");
|
||||
}
|
||||
|
||||
return listID;
|
||||
} catch (final Exception exception) {
|
||||
throw new ParsingException("Error could not parse url :" + exception.getMessage(), exception);
|
||||
|
@ -8,13 +8,21 @@ import java.net.URLEncoder;
|
||||
import java.util.List;
|
||||
|
||||
public class YoutubeSearchQueryHandlerFactory extends SearchQueryHandlerFactory {
|
||||
|
||||
public static final String CHARSET_UTF_8 = "UTF-8";
|
||||
|
||||
public static final String ALL = "all";
|
||||
public static final String VIDEOS = "videos";
|
||||
public static final String CHANNELS = "channels";
|
||||
public static final String PLAYLISTS = "playlists";
|
||||
public static final String ALL = "all";
|
||||
|
||||
public static final String MUSIC_SONGS = "music_songs";
|
||||
public static final String MUSIC_VIDEOS = "music_videos";
|
||||
public static final String MUSIC_ALBUMS = "music_albums";
|
||||
public static final String MUSIC_PLAYLISTS = "music_playlists";
|
||||
public static final String MUSIC_ARTISTS = "music_artists";
|
||||
|
||||
private static final String SEARCH_URL = "https://www.youtube.com/results?search_query=";
|
||||
private static final String MUSIC_SEARCH_URL = "https://music.youtube.com/search?q=";
|
||||
|
||||
public static YoutubeSearchQueryHandlerFactory getInstance() {
|
||||
return new YoutubeSearchQueryHandlerFactory();
|
||||
@ -23,20 +31,27 @@ public class YoutubeSearchQueryHandlerFactory extends SearchQueryHandlerFactory
|
||||
@Override
|
||||
public String getUrl(String searchString, List<String> contentFilters, String sortFilter) throws ParsingException {
|
||||
try {
|
||||
final String url = "https://www.youtube.com/results"
|
||||
+ "?search_query=" + URLEncoder.encode(searchString, CHARSET_UTF_8);
|
||||
|
||||
if (contentFilters.size() > 0) {
|
||||
switch (contentFilters.get(0)) {
|
||||
case VIDEOS: return url + "&sp=EgIQAQ%253D%253D";
|
||||
case CHANNELS: return url + "&sp=EgIQAg%253D%253D";
|
||||
case PLAYLISTS: return url + "&sp=EgIQAw%253D%253D";
|
||||
case ALL:
|
||||
default:
|
||||
break;
|
||||
case VIDEOS:
|
||||
return SEARCH_URL + URLEncoder.encode(searchString, CHARSET_UTF_8) + "&sp=EgIQAQ%253D%253D";
|
||||
case CHANNELS:
|
||||
return SEARCH_URL + URLEncoder.encode(searchString, CHARSET_UTF_8) + "&sp=EgIQAg%253D%253D";
|
||||
case PLAYLISTS:
|
||||
return SEARCH_URL + URLEncoder.encode(searchString, CHARSET_UTF_8) + "&sp=EgIQAw%253D%253D";
|
||||
case MUSIC_SONGS:
|
||||
case MUSIC_VIDEOS:
|
||||
case MUSIC_ALBUMS:
|
||||
case MUSIC_PLAYLISTS:
|
||||
case MUSIC_ARTISTS:
|
||||
return MUSIC_SEARCH_URL + URLEncoder.encode(searchString, CHARSET_UTF_8);
|
||||
}
|
||||
}
|
||||
|
||||
return url;
|
||||
return SEARCH_URL + URLEncoder.encode(searchString, CHARSET_UTF_8);
|
||||
} catch (UnsupportedEncodingException e) {
|
||||
throw new ParsingException("Could not encode query", e);
|
||||
}
|
||||
@ -48,6 +63,12 @@ public class YoutubeSearchQueryHandlerFactory extends SearchQueryHandlerFactory
|
||||
ALL,
|
||||
VIDEOS,
|
||||
CHANNELS,
|
||||
PLAYLISTS};
|
||||
PLAYLISTS,
|
||||
MUSIC_SONGS,
|
||||
MUSIC_VIDEOS,
|
||||
MUSIC_ALBUMS,
|
||||
MUSIC_PLAYLISTS
|
||||
// MUSIC_ARTISTS
|
||||
};
|
||||
}
|
||||
}
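A rough sketch of the URLs the reworked `getUrl` now produces, assuming `singletonList` and the filter constants are statically imported; the query is just an example:

```java
final YoutubeSearchQueryHandlerFactory factory = YoutubeSearchQueryHandlerFactory.getInstance();

// Regular filters keep the www.youtube.com results URL plus the matching "sp" parameter.
factory.getUrl("queen", singletonList(VIDEOS), "");
// -> https://www.youtube.com/results?search_query=queen&sp=EgIQAQ%253D%253D

// Any of the music_* filters switches to the YouTube Music search URL.
factory.getUrl("queen", singletonList(MUSIC_SONGS), "");
// -> https://music.youtube.com/search?q=queen
```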
@ -5,6 +5,7 @@ import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.exceptions.ContentNotAvailableException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ContentNotSupportedException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||
import org.schabi.newpipe.extractor.utils.DashMpdParser;
|
||||
@ -47,7 +48,7 @@ public class StreamInfo extends Info {
|
||||
}
|
||||
|
||||
public StreamInfo(int serviceId, String url, String originalUrl, StreamType streamType, String id, String name,
|
||||
int ageLimit) {
|
||||
int ageLimit) {
|
||||
super(serviceId, id, url, originalUrl, name);
|
||||
this.streamType = streamType;
|
||||
this.ageLimit = ageLimit;
|
||||
@ -131,6 +132,8 @@ public class StreamInfo extends Info {
|
||||
/* Load and extract audio */
|
||||
try {
|
||||
streamInfo.setAudioStreams(extractor.getAudioStreams());
|
||||
} catch (ContentNotSupportedException e) {
|
||||
throw e;
|
||||
} catch (Exception e) {
|
||||
streamInfo.addError(new ExtractionException("Couldn't get audio streams", e));
|
||||
}
|
||||
|
@ -11,7 +11,9 @@ import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
public class JsonUtils {
|
||||
public static final JsonObject DEFAULT_EMPTY = new JsonObject();
|
||||
public static final JsonObject EMPTY_OBJECT = new JsonObject();
|
||||
public static final JsonArray EMPTY_ARRAY = new JsonArray();
|
||||
public static final String EMPTY_STRING = "";
|
||||
|
||||
private JsonUtils() {
|
||||
}
|
||||
|
@@ -0,0 +1,7 @@
package org.schabi.newpipe.extractor.services;

@SuppressWarnings("unused")
public interface BaseSearchExtractorTest extends BaseListExtractorTest {
    void testSearchString() throws Exception;
    void testSearchSuggestion() throws Exception;
}
@@ -0,0 +1,53 @@
|
||||
package org.schabi.newpipe.extractor.services;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.schabi.newpipe.extractor.Extractor;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.schabi.newpipe.extractor.ExtractorAsserts.assertIsSecureUrl;
|
||||
|
||||
public abstract class DefaultExtractorTest<T extends Extractor> implements BaseExtractorTest {
|
||||
public abstract T extractor() throws Exception;
|
||||
|
||||
public abstract StreamingService expectedService() throws Exception;
|
||||
public abstract String expectedName() throws Exception;
|
||||
public abstract String expectedId() throws Exception;
|
||||
public abstract String expectedUrlContains() throws Exception;
|
||||
public abstract String expectedOriginalUrlContains() throws Exception;
|
||||
|
||||
@Test
|
||||
@Override
|
||||
public void testServiceId() throws Exception {
|
||||
assertEquals(expectedService().getServiceId(), extractor().getServiceId());
|
||||
}
|
||||
|
||||
@Test
|
||||
@Override
|
||||
public void testName() throws Exception {
|
||||
assertEquals(expectedName(), extractor().getName());
|
||||
}
|
||||
|
||||
@Test
|
||||
@Override
|
||||
public void testId() throws Exception {
|
||||
assertEquals(expectedId(), extractor().getId());
|
||||
}
|
||||
|
||||
@Test
|
||||
@Override
|
||||
public void testUrl() throws Exception {
|
||||
final String url = extractor().getUrl();
|
||||
assertIsSecureUrl(url);
|
||||
assertThat(url, containsString(expectedUrlContains()));
|
||||
}
|
||||
|
||||
@Test
|
||||
@Override
|
||||
public void testOriginalUrl() throws Exception {
|
||||
final String originalUrl = extractor().getOriginalUrl();
|
||||
assertIsSecureUrl(originalUrl);
|
||||
assertThat(originalUrl, containsString(expectedOriginalUrlContains()));
|
||||
}
|
||||
}
|
@ -0,0 +1,50 @@
|
||||
package org.schabi.newpipe.extractor.services;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.ListExtractor;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
import static org.schabi.newpipe.extractor.services.DefaultTests.*;
|
||||
|
||||
public abstract class DefaultListExtractorTest<T extends ListExtractor<? extends InfoItem>> extends DefaultExtractorTest<T>
|
||||
implements BaseListExtractorTest {
|
||||
|
||||
@Nullable
|
||||
public InfoItem.InfoType expectedInfoItemType() {
|
||||
return null;
|
||||
}
|
||||
|
||||
public boolean expectedHasMoreItems() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Test
|
||||
@Override
|
||||
public void testRelatedItems() throws Exception {
|
||||
final ListExtractor<? extends InfoItem> extractor = extractor();
|
||||
|
||||
final InfoItem.InfoType expectedType = expectedInfoItemType();
|
||||
final ListExtractor.InfoItemsPage<? extends InfoItem> items = defaultTestRelatedItems(extractor);
|
||||
if (expectedType != null) {
|
||||
assertOnlyContainsType(items, expectedType);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
@Override
|
||||
public void testMoreRelatedItems() throws Exception {
|
||||
final ListExtractor<? extends InfoItem> extractor = extractor();
|
||||
|
||||
if (expectedHasMoreItems()) {
|
||||
final InfoItem.InfoType expectedType = expectedInfoItemType();
|
||||
final ListExtractor.InfoItemsPage<? extends InfoItem> items = defaultTestMoreItems(extractor);
|
||||
if (expectedType != null) {
|
||||
assertOnlyContainsType(items, expectedType);
|
||||
}
|
||||
} else {
|
||||
assertNoMoreItems(extractor);
|
||||
}
|
||||
}
|
||||
}
|
@@ -0,0 +1,34 @@
package org.schabi.newpipe.extractor.services;

import org.junit.Test;
import org.schabi.newpipe.extractor.search.SearchExtractor;


import javax.annotation.Nullable;

import static org.junit.Assert.assertEquals;
import static org.schabi.newpipe.extractor.ExtractorAsserts.assertEmpty;

public abstract class DefaultSearchExtractorTest extends DefaultListExtractorTest<SearchExtractor>
        implements BaseSearchExtractorTest {

    public abstract String expectedSearchString();
    @Nullable public abstract String expectedSearchSuggestion();

    @Test
    @Override
    public void testSearchString() throws Exception {
        assertEquals(expectedSearchString(), extractor().getSearchString());
    }

    @Test
    @Override
    public void testSearchSuggestion() throws Exception {
        final String expectedSearchSuggestion = expectedSearchSuggestion();
        if (expectedSearchSuggestion == null || expectedSearchSuggestion.isEmpty()) {
            assertEmpty("Suggestion was expected to be empty", extractor().getSearchSuggestion());
        } else {
            assertEquals(expectedSearchSuggestion, extractor().getSearchSuggestion());
        }
    }
}
@@ -2,23 +2,22 @@ package org.schabi.newpipe.extractor.services;
|
||||
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.ListExtractor;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelInfoItem;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.LinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||
import org.schabi.newpipe.extractor.playlist.PlaylistInfoItem;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
|
||||
import java.util.Calendar;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import static junit.framework.TestCase.assertFalse;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.schabi.newpipe.extractor.ExtractorAsserts.*;
|
||||
import static org.schabi.newpipe.extractor.StreamingService.*;
|
||||
import static org.schabi.newpipe.extractor.StreamingService.LinkType;
|
||||
|
||||
public final class DefaultTests {
|
||||
public static void defaultTestListOfItems(StreamingService expectedService, List<? extends InfoItem> itemsList, List<Throwable> errors) throws ParsingException {
|
||||
@ -38,11 +37,14 @@ public final class DefaultTests {
|
||||
if (item instanceof StreamInfoItem) {
|
||||
StreamInfoItem streamInfoItem = (StreamInfoItem) item;
|
||||
assertNotEmpty("Uploader name not set: " + item, streamInfoItem.getUploaderName());
|
||||
assertNotEmpty("Uploader url not set: " + item, streamInfoItem.getUploaderUrl());
|
||||
assertIsSecureUrl(streamInfoItem.getUploaderUrl());
|
||||
|
||||
// assertNotEmpty("Uploader url not set: " + item, streamInfoItem.getUploaderUrl());
|
||||
if (streamInfoItem.getUploaderUrl() != null && !streamInfoItem.getUploaderUrl().isEmpty()) {
|
||||
assertIsSecureUrl(streamInfoItem.getUploaderUrl());
|
||||
assertExpectedLinkType(expectedService, streamInfoItem.getUploaderUrl(), LinkType.CHANNEL);
|
||||
}
|
||||
|
||||
assertExpectedLinkType(expectedService, streamInfoItem.getUrl(), LinkType.STREAM);
|
||||
assertExpectedLinkType(expectedService, streamInfoItem.getUploaderUrl(), LinkType.CHANNEL);
|
||||
|
||||
final String textualUploadDate = streamInfoItem.getTextualUploadDate();
|
||||
if (textualUploadDate != null && !textualUploadDate.isEmpty()) {
|
||||
@ -71,12 +73,38 @@ public final class DefaultTests {
|
||||
expectedLinkType, linkTypeByUrl);
|
||||
}
|
||||
|
||||
public static void assertOnlyContainsType(ListExtractor.InfoItemsPage<? extends InfoItem> items, InfoItem.InfoType expectedType) {
|
||||
for (InfoItem item : items.getItems()) {
|
||||
assertEquals("Item list contains unexpected info types",
|
||||
expectedType, item.getInfoType());
|
||||
}
|
||||
}
|
||||
|
||||
public static <T extends InfoItem> void assertNoMoreItems(ListExtractor<T> extractor) throws Exception {
|
||||
assertFalse("More items available when it shouldn't", extractor.hasNextPage());
|
||||
final String nextPageUrl = extractor.getNextPageUrl();
|
||||
assertTrue("Next page is not empty or null", nextPageUrl == null || nextPageUrl.isEmpty());
|
||||
}
|
||||
|
||||
public static void assertNoDuplicatedItems(StreamingService expectedService,
|
||||
ListExtractor.InfoItemsPage<InfoItem> page1,
|
||||
ListExtractor.InfoItemsPage<InfoItem> page2) throws Exception {
|
||||
defaultTestListOfItems(expectedService, page1.getItems(), page1.getErrors());
|
||||
defaultTestListOfItems(expectedService, page2.getItems(), page2.getErrors());
|
||||
|
||||
final Set<String> urlsSet = new HashSet<>();
|
||||
for (InfoItem item : page1.getItems()) {
|
||||
urlsSet.add(item.getUrl());
|
||||
}
|
||||
|
||||
for (InfoItem item : page2.getItems()) {
|
||||
final boolean wasAdded = urlsSet.add(item.getUrl());
|
||||
if (!wasAdded) {
|
||||
fail("Same item was on the first and second page item list");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static <T extends InfoItem> ListExtractor.InfoItemsPage<T> defaultTestRelatedItems(ListExtractor<T> extractor) throws Exception {
|
||||
final ListExtractor.InfoItemsPage<T> page = extractor.getInitialPage();
|
||||
final List<T> itemsList = page.getItems();
|
||||
|
@ -1,58 +0,0 @@
|
||||
package org.schabi.newpipe.extractor.services.media_ccc;
|
||||
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
import org.schabi.newpipe.DownloaderTestImpl;
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.ListExtractor;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelInfoItem;
|
||||
import org.schabi.newpipe.extractor.search.SearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.media_ccc.extractors.MediaCCCSearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCSearchQueryHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
import static junit.framework.TestCase.assertTrue;
|
||||
import static org.schabi.newpipe.extractor.ServiceList.MediaCCC;
|
||||
|
||||
/**
|
||||
* Test for {@link MediaCCCSearchExtractor}
|
||||
*/
|
||||
public class MediaCCCSearchExtractorAllTest {
|
||||
|
||||
private static SearchExtractor extractor;
|
||||
private static ListExtractor.InfoItemsPage<InfoItem> itemsPage;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUpClass() throws Exception {
|
||||
NewPipe.init(DownloaderTestImpl.getInstance());
|
||||
extractor = MediaCCC.getSearchExtractor(new MediaCCCSearchQueryHandlerFactory()
|
||||
.fromQuery("c3", Arrays.asList(new String[0]), ""));
|
||||
extractor.fetchPage();
|
||||
itemsPage = extractor.getInitialPage();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testIfChannelInfoItemsAvailable() {
|
||||
boolean isAvialable = false;
|
||||
for (InfoItem item : itemsPage.getItems()) {
|
||||
if (item instanceof ChannelInfoItem) {
|
||||
isAvialable = true;
|
||||
}
|
||||
}
|
||||
assertTrue("ChannelInfoItem not in all list", isAvialable);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testIfStreamInfoitemsAvailable() {
|
||||
boolean isAvialable = false;
|
||||
for (InfoItem item : itemsPage.getItems()) {
|
||||
if (item instanceof StreamInfoItem) {
|
||||
isAvialable = true;
|
||||
}
|
||||
}
|
||||
assertTrue("ChannelInfoItem not in all list", isAvialable);
|
||||
}
|
||||
}
|
@ -1,48 +0,0 @@
|
||||
package org.schabi.newpipe.extractor.services.media_ccc;
|
||||
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
import org.schabi.newpipe.DownloaderTestImpl;
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.ListExtractor;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelInfoItem;
|
||||
import org.schabi.newpipe.extractor.search.SearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.media_ccc.extractors.MediaCCCSearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCSearchQueryHandlerFactory;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
import static junit.framework.TestCase.assertTrue;
|
||||
import static org.schabi.newpipe.extractor.ServiceList.MediaCCC;
|
||||
|
||||
/**
|
||||
* Test for {@link MediaCCCSearchExtractor}
|
||||
*/
|
||||
public class MediaCCCSearchExtractorConferencesTest {
|
||||
|
||||
private static SearchExtractor extractor;
|
||||
private static ListExtractor.InfoItemsPage<InfoItem> itemsPage;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUpClass() throws Exception {
|
||||
NewPipe.init(DownloaderTestImpl.getInstance());
|
||||
extractor = MediaCCC.getSearchExtractor(new MediaCCCSearchQueryHandlerFactory()
|
||||
.fromQuery("c3", Arrays.asList(new String[]{"conferences"}), ""));
|
||||
extractor.fetchPage();
|
||||
itemsPage = extractor.getInitialPage();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testReturnTypeChannel() {
|
||||
for (InfoItem item : itemsPage.getItems()) {
|
||||
assertTrue("Item is not of type channel", item instanceof ChannelInfoItem);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testItemCount() {
|
||||
assertTrue("Count is to hight: " + itemsPage.getItems().size(), itemsPage.getItems().size() < 127);
|
||||
assertTrue("Countis to low: " + itemsPage.getItems().size(), itemsPage.getItems().size() >= 29);
|
||||
}
|
||||
}
|
@ -1,72 +0,0 @@
|
||||
package org.schabi.newpipe.extractor.services.media_ccc;
|
||||
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
import org.schabi.newpipe.DownloaderTestImpl;
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.ListExtractor;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.search.SearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.media_ccc.extractors.MediaCCCSearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCSearchQueryHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
import static junit.framework.TestCase.assertEquals;
|
||||
import static junit.framework.TestCase.assertTrue;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.schabi.newpipe.extractor.ServiceList.MediaCCC;
|
||||
|
||||
/**
|
||||
* Test for {@link MediaCCCSearchExtractor}
|
||||
*/
|
||||
public class MediaCCCSearchExtractorEventsTest {
|
||||
private static SearchExtractor extractor;
|
||||
private static ListExtractor.InfoItemsPage<InfoItem> itemsPage;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUpClass() throws Exception {
|
||||
NewPipe.init(DownloaderTestImpl.getInstance());
|
||||
extractor = MediaCCC.getSearchExtractor(new MediaCCCSearchQueryHandlerFactory()
|
||||
.fromQuery("linux", Arrays.asList(new String[]{"events"}), ""));
|
||||
extractor.fetchPage();
|
||||
itemsPage = extractor.getInitialPage();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCount() throws Exception {
|
||||
assertTrue(Integer.toString(itemsPage.getItems().size()),
|
||||
itemsPage.getItems().size() >= 25);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testServiceId() throws Exception {
|
||||
assertEquals(2, extractor.getServiceId());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testName() throws Exception {
|
||||
assertFalse(itemsPage.getItems().get(0).getName(), itemsPage.getItems().get(0).getName().isEmpty());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUrl() throws Exception {
|
||||
assertTrue("Url should start with: https://api.media.ccc.de/public/events/",
|
||||
itemsPage.getItems().get(0).getUrl().startsWith("https://api.media.ccc.de/public/events/"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testThumbnailUrl() throws Exception {
|
||||
assertTrue(itemsPage.getItems().get(0).getThumbnailUrl(),
|
||||
itemsPage.getItems().get(0).getThumbnailUrl().startsWith("https://static.media.ccc.de/media/")
|
||||
&& itemsPage.getItems().get(0).getThumbnailUrl().endsWith(".jpg"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testReturnTypeStream() throws Exception {
|
||||
for (InfoItem item : itemsPage.getItems()) {
|
||||
assertTrue("Item is not of type StreamInfoItem", item instanceof StreamInfoItem);
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,90 @@
package org.schabi.newpipe.extractor.services.media_ccc.search;

import org.junit.BeforeClass;
import org.schabi.newpipe.DownloaderTestImpl;
import org.schabi.newpipe.extractor.InfoItem;
import org.schabi.newpipe.extractor.NewPipe;
import org.schabi.newpipe.extractor.StreamingService;
import org.schabi.newpipe.extractor.search.SearchExtractor;
import org.schabi.newpipe.extractor.services.DefaultSearchExtractorTest;

import javax.annotation.Nullable;

import static java.util.Collections.singletonList;
import static org.schabi.newpipe.extractor.ServiceList.MediaCCC;
import static org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCSearchQueryHandlerFactory.CONFERENCES;
import static org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCSearchQueryHandlerFactory.EVENTS;

public class MediaCCCSearchExtractorTest {

public static class All extends DefaultSearchExtractorTest {
private static SearchExtractor extractor;
private static final String QUERY = "kde";

@BeforeClass
public static void setUp() throws Exception {
NewPipe.init(DownloaderTestImpl.getInstance());
extractor = MediaCCC.getSearchExtractor(QUERY);
extractor.fetchPage();
}

@Override public SearchExtractor extractor() { return extractor; }
@Override public StreamingService expectedService() { return MediaCCC; }
@Override public String expectedName() { return QUERY; }
@Override public String expectedId() { return QUERY; }
@Override public String expectedUrlContains() { return "api.media.ccc.de/public/events/search?q=" + QUERY; }
@Override public String expectedOriginalUrlContains() { return "api.media.ccc.de/public/events/search?q=" + QUERY; }
@Override public String expectedSearchString() { return QUERY; }
@Nullable @Override public String expectedSearchSuggestion() { return null; }

@Override public boolean expectedHasMoreItems() { return false; }
}

public static class Conferences extends DefaultSearchExtractorTest {
private static SearchExtractor extractor;
private static final String QUERY = "c3";

@BeforeClass
public static void setUp() throws Exception {
NewPipe.init(DownloaderTestImpl.getInstance());
extractor = MediaCCC.getSearchExtractor(QUERY, singletonList(CONFERENCES), "");
extractor.fetchPage();
}

@Override public SearchExtractor extractor() { return extractor; }
@Override public StreamingService expectedService() { return MediaCCC; }
@Override public String expectedName() { return QUERY; }
@Override public String expectedId() { return QUERY; }
@Override public String expectedUrlContains() { return "api.media.ccc.de/public/events/search?q=" + QUERY; }
@Override public String expectedOriginalUrlContains() { return "api.media.ccc.de/public/events/search?q=" + QUERY; }
@Override public String expectedSearchString() { return QUERY; }
@Nullable @Override public String expectedSearchSuggestion() { return null; }

@Nullable @Override public InfoItem.InfoType expectedInfoItemType() { return InfoItem.InfoType.CHANNEL; }
@Override public boolean expectedHasMoreItems() { return false; }
}

public static class Events extends DefaultSearchExtractorTest {
private static SearchExtractor extractor;
private static final String QUERY = "linux";

@BeforeClass
public static void setUp() throws Exception {
NewPipe.init(DownloaderTestImpl.getInstance());
extractor = MediaCCC.getSearchExtractor(QUERY, singletonList(EVENTS), "");
extractor.fetchPage();
}

@Override public SearchExtractor extractor() { return extractor; }
@Override public StreamingService expectedService() { return MediaCCC; }
@Override public String expectedName() { return QUERY; }
@Override public String expectedId() { return QUERY; }
@Override public String expectedUrlContains() { return "api.media.ccc.de/public/events/search?q=" + QUERY; }
@Override public String expectedOriginalUrlContains() { return "api.media.ccc.de/public/events/search?q=" + QUERY; }
@Override public String expectedSearchString() { return QUERY; }
@Nullable @Override public String expectedSearchSuggestion() { return null; }

@Nullable @Override public InfoItem.InfoType expectedInfoItemType() { return InfoItem.InfoType.STREAM; }
@Override public boolean expectedHasMoreItems() { return false; }
}
}
@ -0,0 +1,205 @@
|
||||
package org.schabi.newpipe.extractor.services.peertube;
|
||||
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Ignore;
|
||||
import org.junit.Test;
|
||||
import org.schabi.newpipe.DownloaderTestImpl;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelExtractor;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.services.BaseChannelExtractorTest;
|
||||
import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeAccountExtractor;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
import static org.schabi.newpipe.extractor.ExtractorAsserts.assertIsSecureUrl;
|
||||
import static org.schabi.newpipe.extractor.ServiceList.PeerTube;
|
||||
import static org.schabi.newpipe.extractor.services.DefaultTests.*;
|
||||
|
||||
/**
|
||||
* Test for {@link PeertubeAccountExtractor}
|
||||
*/
|
||||
public class PeertubeAccountExtractorTest {
|
||||
public static class KDE implements BaseChannelExtractorTest {
|
||||
private static PeertubeAccountExtractor extractor;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
NewPipe.init(DownloaderTestImpl.getInstance());
|
||||
// setting instance might break test when running in parallel
|
||||
PeerTube.setInstance(new PeertubeInstance("https://peertube.mastodon.host", "PeerTube on Mastodon.host"));
|
||||
extractor = (PeertubeAccountExtractor) PeerTube
|
||||
.getChannelExtractor("https://peertube.mastodon.host/api/v1/accounts/kde");
|
||||
extractor.fetchPage();
|
||||
}
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// Extractor
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
@Test
|
||||
public void testServiceId() {
|
||||
assertEquals(PeerTube.getServiceId(), extractor.getServiceId());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testName() throws ParsingException {
|
||||
assertEquals("The KDE Community", extractor.getName());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testId() throws ParsingException {
|
||||
assertEquals("accounts/kde", extractor.getId());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUrl() throws ParsingException {
|
||||
assertEquals("https://peertube.mastodon.host/api/v1/accounts/kde", extractor.getUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testOriginalUrl() throws ParsingException {
|
||||
assertEquals("https://peertube.mastodon.host/accounts/kde", extractor.getOriginalUrl());
|
||||
}
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// ListExtractor
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
@Test
|
||||
public void testRelatedItems() throws Exception {
|
||||
defaultTestRelatedItems(extractor);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMoreRelatedItems() throws Exception {
|
||||
defaultTestMoreItems(extractor);
|
||||
}
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// ChannelExtractor
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
@Test
|
||||
public void testDescription() throws ParsingException {
|
||||
assertNotNull(extractor.getDescription());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAvatarUrl() throws ParsingException {
|
||||
assertIsSecureUrl(extractor.getAvatarUrl());
|
||||
}
|
||||
|
||||
@Ignore
|
||||
@Test
|
||||
public void testBannerUrl() throws ParsingException {
|
||||
assertIsSecureUrl(extractor.getBannerUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFeedUrl() throws ParsingException {
|
||||
assertEquals("https://peertube.mastodon.host/feeds/videos.xml?accountId=32465", extractor.getFeedUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSubscriberCount() throws ParsingException {
|
||||
assertTrue("Wrong subscriber count", extractor.getSubscriberCount() >= 5);
|
||||
}
|
||||
}
|
||||
|
||||
public static class Booteille implements BaseChannelExtractorTest {
|
||||
private static PeertubeAccountExtractor extractor;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
NewPipe.init(DownloaderTestImpl.getInstance());
|
||||
// setting instance might break test when running in parallel
|
||||
PeerTube.setInstance(new PeertubeInstance("https://peertube.mastodon.host", "PeerTube on Mastodon.host"));
|
||||
extractor = (PeertubeAccountExtractor) PeerTube
|
||||
.getChannelExtractor("https://peertube.mastodon.host/accounts/booteille");
|
||||
extractor.fetchPage();
|
||||
}
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// Additional Testing
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
@Test
|
||||
public void testGetPageInNewExtractor() throws Exception {
|
||||
final ChannelExtractor newExtractor = PeerTube.getChannelExtractor(extractor.getUrl());
|
||||
defaultTestGetPageInNewExtractor(extractor, newExtractor);
|
||||
}
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// Extractor
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
@Test
|
||||
public void testServiceId() {
|
||||
assertEquals(PeerTube.getServiceId(), extractor.getServiceId());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testName() throws ParsingException {
|
||||
assertEquals("booteille", extractor.getName());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testId() throws ParsingException {
|
||||
assertEquals("accounts/booteille", extractor.getId());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUrl() throws ParsingException {
|
||||
assertEquals("https://peertube.mastodon.host/api/v1/accounts/booteille", extractor.getUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testOriginalUrl() throws ParsingException {
|
||||
assertEquals("https://peertube.mastodon.host/accounts/booteille", extractor.getOriginalUrl());
|
||||
}
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// ListExtractor
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
@Test
|
||||
public void testRelatedItems() throws Exception {
|
||||
defaultTestRelatedItems(extractor);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMoreRelatedItems() throws Exception {
|
||||
defaultTestMoreItems(extractor);
|
||||
}
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// ChannelExtractor
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
@Test
|
||||
public void testDescription() throws ParsingException {
|
||||
assertNotNull(extractor.getDescription());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAvatarUrl() throws ParsingException {
|
||||
assertIsSecureUrl(extractor.getAvatarUrl());
|
||||
}
|
||||
|
||||
@Ignore
|
||||
@Test
|
||||
public void testBannerUrl() throws ParsingException {
|
||||
assertIsSecureUrl(extractor.getBannerUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFeedUrl() throws ParsingException {
|
||||
assertEquals("https://peertube.mastodon.host/feeds/videos.xml?accountId=1753", extractor.getFeedUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSubscriberCount() throws ParsingException {
|
||||
assertTrue("Wrong subscriber count", extractor.getSubscriberCount() >= 1);
|
||||
}
|
||||
}
|
||||
}
|
@ -11,7 +11,6 @@ import org.schabi.newpipe.extractor.services.BaseChannelExtractorTest;
|
||||
import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeChannelExtractor;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
import static org.schabi.newpipe.extractor.ExtractorAsserts.assertEmpty;
|
||||
import static org.schabi.newpipe.extractor.ExtractorAsserts.assertIsSecureUrl;
|
||||
import static org.schabi.newpipe.extractor.ServiceList.PeerTube;
|
||||
import static org.schabi.newpipe.extractor.services.DefaultTests.*;
|
||||
@ -20,7 +19,7 @@ import static org.schabi.newpipe.extractor.services.DefaultTests.*;
|
||||
* Test for {@link PeertubeChannelExtractor}
|
||||
*/
|
||||
public class PeertubeChannelExtractorTest {
|
||||
public static class KDE implements BaseChannelExtractorTest {
|
||||
public static class DanDAugeTutoriels implements BaseChannelExtractorTest {
|
||||
private static PeertubeChannelExtractor extractor;
|
||||
|
||||
@BeforeClass
|
||||
@ -29,7 +28,7 @@ public class PeertubeChannelExtractorTest {
|
||||
// setting instance might break test when running in parallel
|
||||
PeerTube.setInstance(new PeertubeInstance("https://peertube.mastodon.host", "PeerTube on Mastodon.host"));
|
||||
extractor = (PeertubeChannelExtractor) PeerTube
|
||||
.getChannelExtractor("https://peertube.mastodon.host/api/v1/accounts/kde");
|
||||
.getChannelExtractor("https://peertube.mastodon.host/api/v1/video-channels/7682d9f2-07be-4622-862e-93ec812e2ffa");
|
||||
extractor.fetchPage();
|
||||
}
|
||||
|
||||
@ -44,22 +43,22 @@ public class PeertubeChannelExtractorTest {
|
||||
|
||||
@Test
|
||||
public void testName() throws ParsingException {
|
||||
assertEquals("The KDE Community", extractor.getName());
|
||||
assertEquals("Dan d'Auge tutoriels", extractor.getName());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testId() throws ParsingException {
|
||||
assertEquals("kde", extractor.getId());
|
||||
assertEquals("video-channels/7682d9f2-07be-4622-862e-93ec812e2ffa", extractor.getId());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUrl() throws ParsingException {
|
||||
assertEquals("https://peertube.mastodon.host/api/v1/accounts/kde", extractor.getUrl());
|
||||
assertEquals("https://peertube.mastodon.host/api/v1/video-channels/7682d9f2-07be-4622-862e-93ec812e2ffa", extractor.getUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testOriginalUrl() throws ParsingException {
|
||||
assertEquals("https://peertube.mastodon.host/accounts/kde", extractor.getOriginalUrl());
|
||||
assertEquals("https://peertube.mastodon.host/video-channels/7682d9f2-07be-4622-862e-93ec812e2ffa", extractor.getOriginalUrl());
|
||||
}
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
@ -98,16 +97,16 @@ public class PeertubeChannelExtractorTest {
|
||||
|
||||
@Test
|
||||
public void testFeedUrl() throws ParsingException {
|
||||
assertEmpty(extractor.getFeedUrl());
|
||||
assertEquals("https://peertube.mastodon.host/feeds/videos.xml?videoChannelId=1361", extractor.getFeedUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSubscriberCount() throws ParsingException {
|
||||
assertTrue("Wrong subscriber count", extractor.getSubscriberCount() >= 5);
|
||||
assertTrue("Wrong subscriber count", extractor.getSubscriberCount() >= 4);
|
||||
}
|
||||
}
|
||||
|
||||
public static class Booteille implements BaseChannelExtractorTest {
|
||||
public static class Divers implements BaseChannelExtractorTest {
|
||||
private static PeertubeChannelExtractor extractor;
|
||||
|
||||
@BeforeClass
|
||||
@ -116,7 +115,7 @@ public class PeertubeChannelExtractorTest {
|
||||
// setting instance might break test when running in parallel
|
||||
PeerTube.setInstance(new PeertubeInstance("https://peertube.mastodon.host", "PeerTube on Mastodon.host"));
|
||||
extractor = (PeertubeChannelExtractor) PeerTube
|
||||
.getChannelExtractor("https://peertube.mastodon.host/accounts/booteille");
|
||||
.getChannelExtractor("https://peertube.mastodon.host/video-channels/35080089-79b6-45fc-96ac-37e4d46a4457");
|
||||
extractor.fetchPage();
|
||||
}
|
||||
|
||||
@ -141,22 +140,22 @@ public class PeertubeChannelExtractorTest {
|
||||
|
||||
@Test
|
||||
public void testName() throws ParsingException {
|
||||
assertEquals("booteille", extractor.getName());
|
||||
assertEquals("Divers", extractor.getName());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testId() throws ParsingException {
|
||||
assertEquals("booteille", extractor.getId());
|
||||
assertEquals("video-channels/35080089-79b6-45fc-96ac-37e4d46a4457", extractor.getId());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUrl() throws ParsingException {
|
||||
assertEquals("https://peertube.mastodon.host/api/v1/accounts/booteille", extractor.getUrl());
|
||||
assertEquals("https://peertube.mastodon.host/api/v1/video-channels/35080089-79b6-45fc-96ac-37e4d46a4457", extractor.getUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testOriginalUrl() throws ParsingException {
|
||||
assertEquals("https://peertube.mastodon.host/accounts/booteille", extractor.getOriginalUrl());
|
||||
assertEquals("https://peertube.mastodon.host/video-channels/35080089-79b6-45fc-96ac-37e4d46a4457", extractor.getOriginalUrl());
|
||||
}
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
@ -195,12 +194,12 @@ public class PeertubeChannelExtractorTest {
|
||||
|
||||
@Test
|
||||
public void testFeedUrl() throws ParsingException {
|
||||
assertEmpty(extractor.getFeedUrl());
|
||||
assertEquals("https://peertube.mastodon.host/feeds/videos.xml?videoChannelId=1227", extractor.getFeedUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSubscriberCount() throws ParsingException {
|
||||
assertTrue("Wrong subscriber count", extractor.getSubscriberCount() >= 1);
|
||||
assertTrue("Wrong subscriber count", extractor.getSubscriberCount() >= 2);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -9,6 +9,7 @@ import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeChanne

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.schabi.newpipe.extractor.ServiceList.PeerTube;

/**
* Test for {@link PeertubeChannelLinkHandlerFactory}
@ -19,6 +20,7 @@ public class PeertubeChannelLinkHandlerFactoryTest {

@BeforeClass
public static void setUp() {
PeerTube.setInstance(new PeertubeInstance("https://peertube.mastodon.host", "PeerTube on Mastodon.host"));
linkHandler = PeertubeChannelLinkHandlerFactory.getInstance();
NewPipe.init(DownloaderTestImpl.getInstance());
}
@ -26,11 +28,20 @@
@Test
public void acceptUrlTest() throws ParsingException {
assertTrue(linkHandler.acceptUrl("https://peertube.mastodon.host/accounts/kranti@videos.squat.net"));
assertTrue(linkHandler.acceptUrl("https://peertube.mastodon.host/video-channels/7682d9f2-07be-4622-862e-93ec812e2ffa"));
}

@Test
public void getIdFromUrl() throws ParsingException {
assertEquals("kranti@videos.squat.net", linkHandler.fromUrl("https://peertube.mastodon.host/accounts/kranti@videos.squat.net").getId());
assertEquals("kranti@videos.squat.net", linkHandler.fromUrl("https://peertube.mastodon.host/accounts/kranti@videos.squat.net/videos").getId());
assertEquals("accounts/kranti@videos.squat.net", linkHandler.fromUrl("https://peertube.mastodon.host/accounts/kranti@videos.squat.net").getId());
assertEquals("accounts/kranti@videos.squat.net", linkHandler.fromUrl("https://peertube.mastodon.host/accounts/kranti@videos.squat.net/videos").getId());
assertEquals("video-channels/7682d9f2-07be-4622-862e-93ec812e2ffa", linkHandler.fromUrl("https://peertube.mastodon.host/video-channels/7682d9f2-07be-4622-862e-93ec812e2ffa/videos").getId());
}

@Test
public void getUrlFromId() throws ParsingException {
assertEquals("https://peertube.mastodon.host/api/v1/video-channels/7682d9f2-07be-4622-862e-93ec812e2ffa", linkHandler.fromId("video-channels/7682d9f2-07be-4622-862e-93ec812e2ffa").getUrl());
assertEquals("https://peertube.mastodon.host/api/v1/accounts/kranti@videos.squat.net", linkHandler.fromId("accounts/kranti@videos.squat.net").getUrl());
assertEquals("https://peertube.mastodon.host/api/v1/accounts/kranti@videos.squat.net", linkHandler.fromId("kranti@videos.squat.net").getUrl());
}
}
@ -145,7 +145,7 @@ public class PeertubeStreamExtractorDefaultTest {
@Test
public void testGetAgeLimit() throws ExtractionException, IOException {
assertEquals(0, extractor.getAgeLimit());
PeertubeStreamExtractor ageLimit = (PeertubeStreamExtractor) PeerTube.getStreamExtractor("https://peertube.co.uk/videos/watch/3c0da7fb-e4d9-442e-84e3-a8c47004ee28");
PeertubeStreamExtractor ageLimit = (PeertubeStreamExtractor) PeerTube.getStreamExtractor("https://peertube.co.uk/videos/watch/0d501633-f2d9-4476-87c6-71f1c02402a4");
ageLimit.fetchPage();
assertEquals(18, ageLimit.getAgeLimit());
}
@ -1,28 +0,0 @@
package org.schabi.newpipe.extractor.services.peertube.search;

import org.junit.Test;
import org.schabi.newpipe.extractor.InfoItem;
import org.schabi.newpipe.extractor.ListExtractor;
import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeSearchExtractor;

import static org.junit.Assert.assertTrue;

/**
* Test for {@link PeertubeSearchExtractor}
*/
public abstract class PeertubeSearchExtractorBaseTest {

protected static PeertubeSearchExtractor extractor;
protected static ListExtractor.InfoItemsPage<InfoItem> itemsPage;

@Test
public void testResultListElementsLength() {
assertTrue(Integer.toString(itemsPage.getItems().size()),
itemsPage.getItems().size() >= 3);
}

@Test
public void testUrl() throws Exception {
assertTrue(extractor.getUrl(), extractor.getUrl().startsWith("https://peertube.mastodon.host/api/v1/search/videos"));
}
}
@ -1,89 +0,0 @@
|
||||
package org.schabi.newpipe.extractor.services.peertube.search;
|
||||
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
import org.schabi.newpipe.DownloaderTestImpl;
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.ListExtractor;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.services.peertube.PeertubeInstance;
|
||||
import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeSearchExtractor;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
import static org.schabi.newpipe.extractor.ServiceList.PeerTube;
|
||||
|
||||
/**
|
||||
* Test for {@link PeertubeSearchExtractor}
|
||||
*/
|
||||
public class PeertubeSearchExtractorDefaultTest extends PeertubeSearchExtractorBaseTest {
|
||||
|
||||
@BeforeClass
|
||||
public static void setUpClass() throws Exception {
|
||||
NewPipe.init(DownloaderTestImpl.getInstance());
|
||||
// setting instance might break test when running in parallel
|
||||
PeerTube.setInstance(new PeertubeInstance("https://peertube.mastodon.host", "PeerTube on Mastodon.host"));
|
||||
extractor = (PeertubeSearchExtractor) PeerTube.getSearchExtractor("kde");
|
||||
extractor.fetchPage();
|
||||
itemsPage = extractor.getInitialPage();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetSecondPageUrl() throws Exception {
|
||||
assertEquals("https://peertube.mastodon.host/api/v1/search/videos?search=kde&start=12&count=12", extractor.getNextPageUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testResultList_FirstElement() {
|
||||
InfoItem firstInfoItem = itemsPage.getItems().get(0);
|
||||
|
||||
assertTrue("search does not match", firstInfoItem.getName().toLowerCase().contains("kde"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testResultListCheckIfContainsStreamItems() {
|
||||
boolean hasStreams = false;
|
||||
for (InfoItem item : itemsPage.getItems()) {
|
||||
if (item instanceof StreamInfoItem) {
|
||||
hasStreams = true;
|
||||
}
|
||||
}
|
||||
assertTrue("Has no InfoItemStreams", hasStreams);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetSecondPage() throws Exception {
|
||||
extractor = (PeertubeSearchExtractor) PeerTube.getSearchExtractor("internet");
|
||||
itemsPage = extractor.getInitialPage();
|
||||
PeertubeSearchExtractor secondExtractor =
|
||||
(PeertubeSearchExtractor) PeerTube.getSearchExtractor("internet");
|
||||
ListExtractor.InfoItemsPage<InfoItem> secondPage = secondExtractor.getPage(itemsPage.getNextPageUrl());
|
||||
assertTrue(Integer.toString(secondPage.getItems().size()),
|
||||
secondPage.getItems().size() >= 10);
|
||||
|
||||
// check if its the same result
|
||||
boolean equals = true;
|
||||
for (int i = 0; i < secondPage.getItems().size()
|
||||
&& i < itemsPage.getItems().size(); i++) {
|
||||
if (!secondPage.getItems().get(i).getUrl().equals(
|
||||
itemsPage.getItems().get(i).getUrl())) {
|
||||
equals = false;
|
||||
}
|
||||
}
|
||||
assertFalse("First and second page are equal", equals);
|
||||
|
||||
assertEquals("https://peertube.mastodon.host/api/v1/search/videos?search=internet&start=24&count=12",
|
||||
secondPage.getNextPageUrl());
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testId() throws Exception {
|
||||
assertEquals("kde", extractor.getId());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testName() {
|
||||
assertEquals("kde", extractor.getName());
|
||||
}
|
||||
}
|
@ -0,0 +1,59 @@
package org.schabi.newpipe.extractor.services.peertube.search;

import org.junit.BeforeClass;
import org.junit.Test;
import org.schabi.newpipe.DownloaderTestImpl;
import org.schabi.newpipe.extractor.InfoItem;
import org.schabi.newpipe.extractor.ListExtractor.InfoItemsPage;
import org.schabi.newpipe.extractor.NewPipe;
import org.schabi.newpipe.extractor.StreamingService;
import org.schabi.newpipe.extractor.search.SearchExtractor;
import org.schabi.newpipe.extractor.services.DefaultSearchExtractorTest;
import org.schabi.newpipe.extractor.services.peertube.PeertubeInstance;

import javax.annotation.Nullable;

import static java.util.Collections.singletonList;
import static org.schabi.newpipe.extractor.ServiceList.PeerTube;
import static org.schabi.newpipe.extractor.services.DefaultTests.assertNoDuplicatedItems;
import static org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeSearchQueryHandlerFactory.VIDEOS;

public class PeertubeSearchExtractorTest {

public static class All extends DefaultSearchExtractorTest {
private static SearchExtractor extractor;
private static final String QUERY = "kde";

@BeforeClass
public static void setUp() throws Exception {
NewPipe.init(DownloaderTestImpl.getInstance());
// setting instance might break test when running in parallel
PeerTube.setInstance(new PeertubeInstance("https://peertube.mastodon.host", "PeerTube on Mastodon.host"));
extractor = PeerTube.getSearchExtractor(QUERY);
extractor.fetchPage();
}

@Override public SearchExtractor extractor() { return extractor; }
@Override public StreamingService expectedService() { return PeerTube; }
@Override public String expectedName() { return QUERY; }
@Override public String expectedId() { return QUERY; }
@Override public String expectedUrlContains() { return "/search/videos?search=" + QUERY; }
@Override public String expectedOriginalUrlContains() { return "/search/videos?search=" + QUERY; }
@Override public String expectedSearchString() { return QUERY; }
@Nullable @Override public String expectedSearchSuggestion() { return null; }
}

public static class PagingTest {
@Test
public void duplicatedItemsCheck() throws Exception {
NewPipe.init(DownloaderTestImpl.getInstance());
final SearchExtractor extractor = PeerTube.getSearchExtractor("internet", singletonList(VIDEOS), "");
extractor.fetchPage();

final InfoItemsPage<InfoItem> page1 = extractor.getInitialPage();
final InfoItemsPage<InfoItem> page2 = extractor.getPage(page1.getNextPageUrl());

assertNoDuplicatedItems(PeerTube, page1, page2);
}
}
}
@ -17,13 +17,15 @@ public class SoundcloudParsingHelperTest {
@Test
public void assertThatHardcodedClientIdIsValid() throws Exception {
assertTrue("Hardcoded client id is not valid anymore",
SoundcloudParsingHelper.checkIfHardcodedClientIdIsValid(DownloaderTestImpl.getInstance()));
SoundcloudParsingHelper.checkIfHardcodedClientIdIsValid());
}

@Test
public void resolveUrlWithEmbedPlayerTest() throws Exception {
Assert.assertEquals("https://soundcloud.com/trapcity", SoundcloudParsingHelper.resolveUrlWithEmbedPlayer("https://api.soundcloud.com/users/26057743"));
Assert.assertEquals("https://soundcloud.com/nocopyrightsounds", SoundcloudParsingHelper.resolveUrlWithEmbedPlayer("https://api.soundcloud.com/users/16069159"));
Assert.assertEquals("https://soundcloud.com/trapcity", SoundcloudParsingHelper.resolveUrlWithEmbedPlayer("https://api-v2.soundcloud.com/users/26057743"));
Assert.assertEquals("https://soundcloud.com/nocopyrightsounds", SoundcloudParsingHelper.resolveUrlWithEmbedPlayer("https://api-v2.soundcloud.com/users/16069159"));
}

@Test
@ -1,8 +1,6 @@
|
||||
package org.schabi.newpipe.extractor.services.soundcloud;
|
||||
|
||||
import org.hamcrest.CoreMatchers;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Ignore;
|
||||
import org.junit.Test;
|
||||
import org.schabi.newpipe.DownloaderTestImpl;
|
||||
import org.schabi.newpipe.extractor.ListExtractor;
|
||||
@ -11,6 +9,7 @@ import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
|
||||
import org.schabi.newpipe.extractor.services.BasePlaylistExtractorTest;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.*;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.schabi.newpipe.extractor.ExtractorAsserts.assertIsSecureUrl;
|
||||
import static org.schabi.newpipe.extractor.ServiceList.SoundCloud;
|
||||
@ -70,14 +69,8 @@ public class SoundcloudPlaylistExtractorTest {
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMoreRelatedItems() {
|
||||
try {
|
||||
defaultTestMoreItems(extractor);
|
||||
} catch (Throwable ignored) {
|
||||
return;
|
||||
}
|
||||
|
||||
fail("This playlist doesn't have more items, it should throw an error");
|
||||
public void testMoreRelatedItems() throws Exception {
|
||||
defaultTestMoreItems(extractor);
|
||||
}
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
@ -89,17 +82,17 @@ public class SoundcloudPlaylistExtractorTest {
|
||||
assertIsSecureUrl(extractor.getThumbnailUrl());
|
||||
}
|
||||
|
||||
@Ignore
|
||||
@Test
|
||||
public void testBannerUrl() {
|
||||
assertIsSecureUrl(extractor.getBannerUrl());
|
||||
// SoundCloud playlists do not have a banner
|
||||
assertNull(extractor.getBannerUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUploaderUrl() {
|
||||
final String uploaderUrl = extractor.getUploaderUrl();
|
||||
assertIsSecureUrl(uploaderUrl);
|
||||
assertTrue(uploaderUrl, uploaderUrl.contains("liluzivert"));
|
||||
assertThat(uploaderUrl, containsString("liluzivert"));
|
||||
}
|
||||
|
||||
@Test
|
||||
@ -114,7 +107,7 @@ public class SoundcloudPlaylistExtractorTest {
|
||||
|
||||
@Test
|
||||
public void testStreamCount() {
|
||||
assertTrue("Error in the streams count", extractor.getStreamCount() >= 10);
|
||||
assertTrue("Stream count does not fit: " + extractor.getStreamCount(), extractor.getStreamCount() >= 10);
|
||||
}
|
||||
}
|
||||
|
||||
@ -181,17 +174,17 @@ public class SoundcloudPlaylistExtractorTest {
|
||||
assertIsSecureUrl(extractor.getThumbnailUrl());
|
||||
}
|
||||
|
||||
@Ignore("not implemented")
|
||||
@Test
|
||||
public void testBannerUrl() {
|
||||
assertIsSecureUrl(extractor.getBannerUrl());
|
||||
// SoundCloud playlists do not have a banner
|
||||
assertNull(extractor.getBannerUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUploaderUrl() {
|
||||
final String uploaderUrl = extractor.getUploaderUrl();
|
||||
assertIsSecureUrl(uploaderUrl);
|
||||
assertThat(uploaderUrl, CoreMatchers.containsString("micky96"));
|
||||
assertThat(uploaderUrl, containsString("micky96"));
|
||||
}
|
||||
|
||||
@Test
|
||||
@ -206,7 +199,7 @@ public class SoundcloudPlaylistExtractorTest {
|
||||
|
||||
@Test
|
||||
public void testStreamCount() {
|
||||
assertTrue("Error in the streams count", extractor.getStreamCount() >= 10);
|
||||
assertTrue("Stream count does not fit: " + extractor.getStreamCount(), extractor.getStreamCount() >= 10);
|
||||
}
|
||||
}
|
||||
|
||||
@ -227,7 +220,7 @@ public class SoundcloudPlaylistExtractorTest {
|
||||
|
||||
@Test
|
||||
public void testGetPageInNewExtractor() throws Exception {
|
||||
final PlaylistExtractor newExtractor = SoundCloud.getPlaylistExtractor(extractor.getUrl());
|
||||
PlaylistExtractor newExtractor = SoundCloud.getPlaylistExtractor(extractor.getUrl());
|
||||
defaultTestGetPageInNewExtractor(extractor, newExtractor);
|
||||
}
|
||||
|
||||
@ -264,14 +257,11 @@ public class SoundcloudPlaylistExtractorTest {
|
||||
// ListExtractor
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
@Ignore
|
||||
@Test
|
||||
public void testRelatedItems() throws Exception {
|
||||
defaultTestRelatedItems(extractor);
|
||||
}
|
||||
|
||||
//TODO: This triggers a 500 at the server
|
||||
@Ignore
|
||||
@Test
|
||||
public void testMoreRelatedItems() throws Exception {
|
||||
ListExtractor.InfoItemsPage<StreamInfoItem> currentPage = defaultTestMoreItems(extractor);
|
||||
@ -286,16 +276,15 @@ public class SoundcloudPlaylistExtractorTest {
|
||||
// PlaylistExtractor
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
@Ignore
|
||||
@Test
|
||||
public void testThumbnailUrl() {
|
||||
assertIsSecureUrl(extractor.getThumbnailUrl());
|
||||
}
|
||||
|
||||
@Ignore
|
||||
@Test
|
||||
public void testBannerUrl() {
|
||||
assertIsSecureUrl(extractor.getBannerUrl());
|
||||
// SoundCloud playlists do not have a banner
|
||||
assertNull(extractor.getBannerUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
@ -317,7 +306,105 @@ public class SoundcloudPlaylistExtractorTest {
|
||||
|
||||
@Test
|
||||
public void testStreamCount() {
|
||||
assertTrue("Error in the streams count", extractor.getStreamCount() >= 3900);
|
||||
assertTrue("Stream count does not fit: " + extractor.getStreamCount(), extractor.getStreamCount() >= 370);
|
||||
}
|
||||
}
|
||||
|
||||
public static class SmallPlaylist implements BasePlaylistExtractorTest {
|
||||
private static SoundcloudPlaylistExtractor extractor;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
NewPipe.init(DownloaderTestImpl.getInstance());
|
||||
extractor = (SoundcloudPlaylistExtractor) SoundCloud
|
||||
.getPlaylistExtractor("https://soundcloud.com/breezy-123/sets/empty-playlist?test=123");
|
||||
extractor.fetchPage();
|
||||
}
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// Extractor
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
@Test
|
||||
public void testServiceId() {
|
||||
assertEquals(SoundCloud.getServiceId(), extractor.getServiceId());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testName() {
|
||||
assertEquals("EMPTY PLAYLIST", extractor.getName());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testId() {
|
||||
assertEquals("23483459", extractor.getId());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUrl() throws Exception {
|
||||
assertEquals("https://soundcloud.com/breezy-123/sets/empty-playlist", extractor.getUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testOriginalUrl() throws Exception {
|
||||
assertEquals("https://soundcloud.com/breezy-123/sets/empty-playlist?test=123", extractor.getOriginalUrl());
|
||||
}
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// ListExtractor
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
@Test
|
||||
public void testRelatedItems() throws Exception {
|
||||
defaultTestRelatedItems(extractor);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMoreRelatedItems() throws Exception {
|
||||
try {
|
||||
defaultTestMoreItems(extractor);
|
||||
} catch (Throwable ignored) {
|
||||
return;
|
||||
}
|
||||
|
||||
fail("This playlist doesn't have more items, it should throw an error");
|
||||
}
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// PlaylistExtractor
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
@Test
|
||||
public void testThumbnailUrl() {
|
||||
assertIsSecureUrl(extractor.getThumbnailUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBannerUrl() {
|
||||
// SoundCloud playlists do not have a banner
|
||||
assertNull(extractor.getBannerUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUploaderUrl() {
|
||||
final String uploaderUrl = extractor.getUploaderUrl();
|
||||
assertIsSecureUrl(uploaderUrl);
|
||||
assertThat(uploaderUrl, containsString("breezy-123"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUploaderName() {
|
||||
assertEquals("breezy-123", extractor.getUploaderName());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUploaderAvatarUrl() {
|
||||
assertIsSecureUrl(extractor.getUploaderAvatarUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testStreamCount() {
|
||||
assertEquals(2, extractor.getStreamCount());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -5,6 +5,7 @@ import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
import org.schabi.newpipe.DownloaderTestImpl;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.exceptions.ContentNotSupportedException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.stream.StreamExtractor;
|
||||
@ -25,107 +26,134 @@ import static org.schabi.newpipe.extractor.ServiceList.SoundCloud;
|
||||
* Test for {@link StreamExtractor}
|
||||
*/
|
||||
public class SoundcloudStreamExtractorDefaultTest {
|
||||
private static SoundcloudStreamExtractor extractor;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
NewPipe.init(DownloaderTestImpl.getInstance());
|
||||
extractor = (SoundcloudStreamExtractor) SoundCloud.getStreamExtractor("https://soundcloud.com/liluzivert/do-what-i-want-produced-by-maaly-raw-don-cannon");
|
||||
extractor.fetchPage();
|
||||
public static class LilUziVertDoWhatIWant {
|
||||
private static SoundcloudStreamExtractor extractor;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
NewPipe.init(DownloaderTestImpl.getInstance());
|
||||
extractor = (SoundcloudStreamExtractor) SoundCloud.getStreamExtractor("https://soundcloud.com/liluzivert/do-what-i-want-produced-by-maaly-raw-don-cannon");
|
||||
extractor.fetchPage();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetInvalidTimeStamp() throws ParsingException {
|
||||
assertTrue(extractor.getTimeStamp() + "",
|
||||
extractor.getTimeStamp() <= 0);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetValidTimeStamp() throws IOException, ExtractionException {
|
||||
StreamExtractor extractor = SoundCloud.getStreamExtractor("https://soundcloud.com/liluzivert/do-what-i-want-produced-by-maaly-raw-don-cannon#t=69");
|
||||
assertEquals("69", extractor.getTimeStamp() + "");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetTitle() throws ParsingException {
|
||||
assertEquals("Do What I Want [Produced By Maaly Raw + Don Cannon]", extractor.getName());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetDescription() throws ParsingException {
|
||||
assertEquals("The Perfect LUV Tape®️", extractor.getDescription().getContent());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetUploaderName() throws ParsingException {
|
||||
assertEquals("LIL UZI VERT", extractor.getUploaderName());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetLength() throws ParsingException {
|
||||
assertEquals(175, extractor.getLength());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetViewCount() throws ParsingException {
|
||||
assertTrue(Long.toString(extractor.getViewCount()),
|
||||
extractor.getViewCount() > 44227978);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetTextualUploadDate() throws ParsingException {
|
||||
Assert.assertEquals("2016-07-31 18:18:07", extractor.getTextualUploadDate());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetUploadDate() throws ParsingException, ParseException {
|
||||
final Calendar instance = Calendar.getInstance();
|
||||
instance.setTime(new SimpleDateFormat("yyyy/MM/dd HH:mm:ss +0000").parse("2016/07/31 18:18:07 +0000"));
|
||||
assertEquals(instance, requireNonNull(extractor.getUploadDate()).date());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetUploaderUrl() throws ParsingException {
|
||||
assertIsSecureUrl(extractor.getUploaderUrl());
|
||||
assertEquals("https://soundcloud.com/liluzivert", extractor.getUploaderUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetThumbnailUrl() throws ParsingException {
|
||||
assertIsSecureUrl(extractor.getThumbnailUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetUploaderAvatarUrl() throws ParsingException {
|
||||
assertIsSecureUrl(extractor.getUploaderAvatarUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetAudioStreams() throws IOException, ExtractionException {
|
||||
assertFalse(extractor.getAudioStreams().isEmpty());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testStreamType() throws ParsingException {
|
||||
assertTrue(extractor.getStreamType() == StreamType.AUDIO_STREAM);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetRelatedVideos() throws ExtractionException, IOException {
|
||||
StreamInfoItemsCollector relatedVideos = extractor.getRelatedStreams();
|
||||
assertFalse(relatedVideos.getItems().isEmpty());
|
||||
assertTrue(relatedVideos.getErrors().isEmpty());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetSubtitlesListDefault() throws IOException, ExtractionException {
|
||||
// The SoundCloud track used in setUp() has no subtitles, so the list is empty
|
||||
assertTrue(extractor.getSubtitlesDefault().isEmpty());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetSubtitlesList() throws IOException, ExtractionException {
|
||||
// The SoundCloud track used in setUp() has no subtitles, so the list is empty
|
||||
assertTrue(extractor.getSubtitlesDefault().isEmpty());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetInvalidTimeStamp() throws ParsingException {
|
||||
assertTrue(extractor.getTimeStamp() + "",
|
||||
extractor.getTimeStamp() <= 0);
|
||||
}
|
||||
public static class ContentNotSupported {
|
||||
@BeforeClass
|
||||
public static void setUp() {
|
||||
NewPipe.init(DownloaderTestImpl.getInstance());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetValidTimeStamp() throws IOException, ExtractionException {
|
||||
StreamExtractor extractor = SoundCloud.getStreamExtractor("https://soundcloud.com/liluzivert/do-what-i-want-produced-by-maaly-raw-don-cannon#t=69");
|
||||
assertEquals("69", extractor.getTimeStamp() + "");
|
||||
}
|
||||
@Test(expected = ContentNotSupportedException.class)
|
||||
public void hlsAudioStream() throws Exception {
|
||||
final StreamExtractor extractor =
|
||||
SoundCloud.getStreamExtractor("https://soundcloud.com/dualipa/cool");
|
||||
extractor.fetchPage();
|
||||
extractor.getAudioStreams();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetTitle() throws ParsingException {
|
||||
assertEquals("Do What I Want [Produced By Maaly Raw + Don Cannon]", extractor.getName());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetDescription() throws ParsingException {
|
||||
assertEquals("The Perfect LUV Tape®️", extractor.getDescription().getContent());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetUploaderName() throws ParsingException {
|
||||
assertEquals("LIL UZI VERT", extractor.getUploaderName());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetLength() throws ParsingException {
|
||||
assertEquals(175, extractor.getLength());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetViewCount() throws ParsingException {
|
||||
assertTrue(Long.toString(extractor.getViewCount()),
|
||||
extractor.getViewCount() > 44227978);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetTextualUploadDate() throws ParsingException {
|
||||
Assert.assertEquals("2016/07/31 18:18:07 +0000", extractor.getTextualUploadDate());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetUploadDate() throws ParsingException, ParseException {
|
||||
final Calendar instance = Calendar.getInstance();
|
||||
instance.setTime(new SimpleDateFormat("yyyy/MM/dd HH:mm:ss +0000").parse("2016/07/31 18:18:07 +0000"));
|
||||
assertEquals(instance, requireNonNull(extractor.getUploadDate()).date());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetUploaderUrl() throws ParsingException {
|
||||
assertIsSecureUrl(extractor.getUploaderUrl());
|
||||
assertEquals("https://soundcloud.com/liluzivert", extractor.getUploaderUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetThumbnailUrl() throws ParsingException {
|
||||
assertIsSecureUrl(extractor.getThumbnailUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetUploaderAvatarUrl() throws ParsingException {
|
||||
assertIsSecureUrl(extractor.getUploaderAvatarUrl());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetAudioStreams() throws IOException, ExtractionException {
|
||||
assertFalse(extractor.getAudioStreams().isEmpty());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testStreamType() throws ParsingException {
|
||||
assertTrue(extractor.getStreamType() == StreamType.AUDIO_STREAM);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetRelatedVideos() throws ExtractionException, IOException {
|
||||
StreamInfoItemsCollector relatedVideos = extractor.getRelatedStreams();
|
||||
assertFalse(relatedVideos.getItems().isEmpty());
|
||||
assertTrue(relatedVideos.getErrors().isEmpty());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetSubtitlesListDefault() throws IOException, ExtractionException {
|
||||
// The SoundCloud track used in setUp() has no subtitles, so the list is empty
|
||||
assertTrue(extractor.getSubtitlesDefault().isEmpty());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetSubtitlesList() throws IOException, ExtractionException {
|
||||
// The SoundCloud track used in setUp() has no subtitles, so the list is empty
|
||||
assertTrue(extractor.getSubtitlesDefault().isEmpty());
|
||||
@Test(expected = ContentNotSupportedException.class)
|
||||
public void bothHlsAndOpusAudioStreams() throws Exception {
|
||||
final StreamExtractor extractor =
|
||||
SoundCloud.getStreamExtractor("https://soundcloud.com/lil-baby-4pf/no-sucker");
|
||||
extractor.fetchPage();
|
||||
extractor.getAudioStreams();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,55 +0,0 @@
|
||||
package org.schabi.newpipe.extractor.services.soundcloud.search;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.ListExtractor;
|
||||
import org.schabi.newpipe.extractor.services.soundcloud.SoundcloudSearchExtractor;
|
||||
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
|
||||
/*
|
||||
* Created by Christian Schabesberger on 17.06.18
|
||||
*
|
||||
* Copyright (C) Christian Schabesberger 2018 <chris.schabesberger@mailbox.org>
|
||||
* SoundcloudSearchExtractorBaseTest.java is part of NewPipe.
|
||||
*
|
||||
* NewPipe is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* NewPipe is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with NewPipe. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Test for {@link SoundcloudSearchExtractor}
|
||||
*/
|
||||
public abstract class SoundcloudSearchExtractorBaseTest {
|
||||
|
||||
protected static SoundcloudSearchExtractor extractor;
|
||||
protected static ListExtractor.InfoItemsPage<InfoItem> itemsPage;
|
||||
|
||||
|
||||
protected static String removeClientId(String url) {
|
||||
String[] splitUrl = url.split("client_id=[a-zA-Z0-9]*&");
|
||||
return splitUrl[0] + splitUrl[1];
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testResultListElementsLength() {
|
||||
assertTrue(Integer.toString(itemsPage.getItems().size()),
|
||||
itemsPage.getItems().size() >= 3);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUrl() throws Exception {
|
||||
assertTrue(extractor.getUrl(), extractor.getUrl().startsWith("https://api-v2.soundcloud.com/search"));
|
||||
}
|
||||
}
|
@ -1,66 +0,0 @@
|
||||
package org.schabi.newpipe.extractor.services.soundcloud.search;
|
||||
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
import org.schabi.newpipe.DownloaderTestImpl;
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.ListExtractor;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelInfoItem;
|
||||
import org.schabi.newpipe.extractor.localization.Localization;
|
||||
import org.schabi.newpipe.extractor.services.soundcloud.SoundcloudSearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.soundcloud.SoundcloudSearchQueryHandlerFactory;
|
||||
|
||||
import static java.util.Arrays.asList;
|
||||
import static org.junit.Assert.*;
|
||||
import static org.schabi.newpipe.extractor.ServiceList.SoundCloud;
|
||||
|
||||
public class SoundcloudSearchExtractorChannelOnlyTest extends SoundcloudSearchExtractorBaseTest {
|
||||
|
||||
@BeforeClass
|
||||
public static void setUpClass() throws Exception {
|
||||
NewPipe.init(DownloaderTestImpl.getInstance(), new Localization("de", "DE"));
|
||||
extractor = (SoundcloudSearchExtractor) SoundCloud.getSearchExtractor("lill uzi vert",
|
||||
asList(SoundcloudSearchQueryHandlerFactory.USERS), null);
|
||||
extractor.fetchPage();
|
||||
itemsPage = extractor.getInitialPage();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetSecondPage() throws Exception {
|
||||
SoundcloudSearchExtractor secondExtractor = (SoundcloudSearchExtractor) SoundCloud.getSearchExtractor("lill uzi vert",
|
||||
asList(SoundcloudSearchQueryHandlerFactory.USERS), null);
|
||||
ListExtractor.InfoItemsPage<InfoItem> secondPage = secondExtractor.getPage(itemsPage.getNextPageUrl());
|
||||
assertTrue(Integer.toString(secondPage.getItems().size()),
|
||||
secondPage.getItems().size() >= 3);
|
||||
|
||||
// check if its the same result
|
||||
boolean equals = true;
|
||||
for (int i = 0; i < secondPage.getItems().size()
|
||||
&& i < itemsPage.getItems().size(); i++) {
|
||||
if (!secondPage.getItems().get(i).getUrl().equals(
|
||||
itemsPage.getItems().get(i).getUrl())) {
|
||||
equals = false;
|
||||
}
|
||||
}
|
||||
assertFalse("First and second page are equal", equals);
|
||||
|
||||
assertEquals("https://api-v2.soundcloud.com/search/users?q=lill+uzi+vert&limit=10&offset=20",
|
||||
removeClientId(secondPage.getNextPageUrl()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetSecondPageUrl() throws Exception {
|
||||
assertEquals("https://api-v2.soundcloud.com/search/users?q=lill+uzi+vert&limit=10&offset=10",
|
||||
removeClientId(extractor.getNextPageUrl()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testOnlyContainChannels() {
|
||||
for (InfoItem item : itemsPage.getItems()) {
|
||||
if (!(item instanceof ChannelInfoItem)) {
|
||||
fail("The following item is no channel item: " + item.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -1,104 +0,0 @@
|
||||
package org.schabi.newpipe.extractor.services.soundcloud.search;
|
||||
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
import org.schabi.newpipe.DownloaderTestImpl;
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.ListExtractor;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.services.soundcloud.SoundcloudSearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.soundcloud.SoundcloudSearchQueryHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.services.youtube.extractors.YoutubeSearchExtractor;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
import static org.schabi.newpipe.extractor.ServiceList.SoundCloud;
|
||||
|
||||
/*
|
||||
* Created by Christian Schabesberger on 27.05.18
|
||||
*
|
||||
* Copyright (C) Christian Schabesberger 2018 <chris.schabesberger@mailbox.org>
|
||||
* YoutubeSearchExtractorStreamTest.java is part of NewPipe.
|
||||
*
|
||||
* NewPipe is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* NewPipe is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with NewPipe. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Test for {@link YoutubeSearchExtractor}
|
||||
*/
|
||||
public class SoundcloudSearchExtractorDefaultTest extends SoundcloudSearchExtractorBaseTest {
|
||||
|
||||
@BeforeClass
|
||||
public static void setUpClass() throws Exception {
|
||||
NewPipe.init(DownloaderTestImpl.getInstance());
|
||||
extractor = (SoundcloudSearchExtractor) SoundCloud.getSearchExtractor(
|
||||
new SoundcloudSearchQueryHandlerFactory().fromQuery("lill uzi vert",
|
||||
Arrays.asList(new String[]{"tracks"}), ""));
|
||||
extractor.fetchPage();
|
||||
itemsPage = extractor.getInitialPage();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetSecondPageUrl() throws Exception {
|
||||
assertEquals("https://api-v2.soundcloud.com/search/tracks?q=lill+uzi+vert&limit=10&offset=10",
|
||||
removeClientId(extractor.getNextPageUrl()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testResultListCheckIfContainsStreamItems() {
|
||||
boolean hasStreams = false;
|
||||
for (InfoItem item : itemsPage.getItems()) {
|
||||
if (item instanceof StreamInfoItem) {
|
||||
hasStreams = true;
|
||||
}
|
||||
}
|
||||
assertTrue("Has no InfoItemStreams", hasStreams);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetSecondPage() throws Exception {
|
||||
SoundcloudSearchExtractor secondExtractor =
|
||||
(SoundcloudSearchExtractor) SoundCloud.getSearchExtractor("lill uzi vert");
|
||||
ListExtractor.InfoItemsPage<InfoItem> secondPage = secondExtractor.getPage(itemsPage.getNextPageUrl());
|
||||
assertTrue(Integer.toString(secondPage.getItems().size()),
|
||||
secondPage.getItems().size() >= 10);
|
||||
|
||||
// check if its the same result
|
||||
boolean equals = true;
|
||||
for (int i = 0; i < secondPage.getItems().size()
|
||||
&& i < itemsPage.getItems().size(); i++) {
|
||||
if (!secondPage.getItems().get(i).getUrl().equals(
|
||||
itemsPage.getItems().get(i).getUrl())) {
|
||||
equals = false;
|
||||
}
|
||||
}
|
||||
assertFalse("First and second page are equal", equals);
|
||||
|
||||
assertEquals("https://api-v2.soundcloud.com/search/tracks?q=lill+uzi+vert&limit=10&offset=20",
|
||||
removeClientId(secondPage.getNextPageUrl()));
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testId() throws Exception {
|
||||
assertEquals("lill uzi vert", extractor.getId());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testName() {
|
||||
assertEquals("lill uzi vert", extractor.getName());
|
||||
}
|
||||
}
|
@ -0,0 +1,135 @@
package org.schabi.newpipe.extractor.services.soundcloud.search;

import org.junit.BeforeClass;
import org.junit.Test;
import org.schabi.newpipe.DownloaderTestImpl;
import org.schabi.newpipe.extractor.InfoItem;
import org.schabi.newpipe.extractor.ListExtractor.InfoItemsPage;
import org.schabi.newpipe.extractor.NewPipe;
import org.schabi.newpipe.extractor.StreamingService;
import org.schabi.newpipe.extractor.search.SearchExtractor;
import org.schabi.newpipe.extractor.services.DefaultSearchExtractorTest;

import javax.annotation.Nullable;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

import static java.util.Collections.singletonList;
import static org.schabi.newpipe.extractor.ServiceList.SoundCloud;
import static org.schabi.newpipe.extractor.services.DefaultTests.assertNoDuplicatedItems;
import static org.schabi.newpipe.extractor.services.soundcloud.SoundcloudSearchQueryHandlerFactory.*;

public class SoundcloudSearchExtractorTest {

    public static class All extends DefaultSearchExtractorTest {
        private static SearchExtractor extractor;
        private static final String QUERY = "lill uzi vert";

        @BeforeClass
        public static void setUp() throws Exception {
            NewPipe.init(DownloaderTestImpl.getInstance());
            extractor = SoundCloud.getSearchExtractor(QUERY);
            extractor.fetchPage();
        }

        @Override public SearchExtractor extractor() { return extractor; }
        @Override public StreamingService expectedService() { return SoundCloud; }
        @Override public String expectedName() { return QUERY; }
        @Override public String expectedId() { return QUERY; }
        @Override public String expectedUrlContains() { return "soundcloud.com/search?q=" + urlEncode(QUERY); }
        @Override public String expectedOriginalUrlContains() { return "soundcloud.com/search?q=" + urlEncode(QUERY); }
        @Override public String expectedSearchString() { return QUERY; }
        @Nullable @Override public String expectedSearchSuggestion() { return null; }
    }

    public static class Tracks extends DefaultSearchExtractorTest {
        private static SearchExtractor extractor;
        private static final String QUERY = "lill uzi vert";

        @BeforeClass
        public static void setUp() throws Exception {
            NewPipe.init(DownloaderTestImpl.getInstance());
            extractor = SoundCloud.getSearchExtractor(QUERY, singletonList(TRACKS), "");
            extractor.fetchPage();
        }

        @Override public SearchExtractor extractor() { return extractor; }
        @Override public StreamingService expectedService() { return SoundCloud; }
        @Override public String expectedName() { return QUERY; }
        @Override public String expectedId() { return QUERY; }
        @Override public String expectedUrlContains() { return "soundcloud.com/search/tracks?q=" + urlEncode(QUERY); }
        @Override public String expectedOriginalUrlContains() { return "soundcloud.com/search/tracks?q=" + urlEncode(QUERY); }
        @Override public String expectedSearchString() { return QUERY; }
        @Nullable @Override public String expectedSearchSuggestion() { return null; }

        @Override public InfoItem.InfoType expectedInfoItemType() { return InfoItem.InfoType.STREAM; }
    }

    public static class Users extends DefaultSearchExtractorTest {
        private static SearchExtractor extractor;
        private static final String QUERY = "lill uzi vert";

        @BeforeClass
        public static void setUp() throws Exception {
            NewPipe.init(DownloaderTestImpl.getInstance());
            extractor = SoundCloud.getSearchExtractor(QUERY, singletonList(USERS), "");
            extractor.fetchPage();
        }

        @Override public SearchExtractor extractor() { return extractor; }
        @Override public StreamingService expectedService() { return SoundCloud; }
        @Override public String expectedName() { return QUERY; }
        @Override public String expectedId() { return QUERY; }
        @Override public String expectedUrlContains() { return "soundcloud.com/search/users?q=" + urlEncode(QUERY); }
        @Override public String expectedOriginalUrlContains() { return "soundcloud.com/search/users?q=" + urlEncode(QUERY); }
        @Override public String expectedSearchString() { return QUERY; }
        @Nullable @Override public String expectedSearchSuggestion() { return null; }

        @Override public InfoItem.InfoType expectedInfoItemType() { return InfoItem.InfoType.CHANNEL; }
    }

    public static class Playlists extends DefaultSearchExtractorTest {
        private static SearchExtractor extractor;
        private static final String QUERY = "lill uzi vert";

        @BeforeClass
        public static void setUp() throws Exception {
            NewPipe.init(DownloaderTestImpl.getInstance());
            extractor = SoundCloud.getSearchExtractor(QUERY, singletonList(PLAYLISTS), "");
            extractor.fetchPage();
        }

        @Override public SearchExtractor extractor() { return extractor; }
        @Override public StreamingService expectedService() { return SoundCloud; }
        @Override public String expectedName() { return QUERY; }
        @Override public String expectedId() { return QUERY; }
        @Override public String expectedUrlContains() { return "soundcloud.com/search/playlists?q=" + urlEncode(QUERY); }
        @Override public String expectedOriginalUrlContains() { return "soundcloud.com/search/playlists?q=" + urlEncode(QUERY); }
        @Override public String expectedSearchString() { return QUERY; }
        @Nullable @Override public String expectedSearchSuggestion() { return null; }

        @Override public InfoItem.InfoType expectedInfoItemType() { return InfoItem.InfoType.PLAYLIST; }
    }

    public static class PagingTest {
        @Test
        public void duplicatedItemsCheck() throws Exception {
            NewPipe.init(DownloaderTestImpl.getInstance());
            final SearchExtractor extractor = SoundCloud.getSearchExtractor("cirque du soleil", singletonList(TRACKS), "");
            extractor.fetchPage();

            final InfoItemsPage<InfoItem> page1 = extractor.getInitialPage();
            final InfoItemsPage<InfoItem> page2 = extractor.getPage(page1.getNextPageUrl());

            assertNoDuplicatedItems(SoundCloud, page1, page2);
        }
    }

    private static String urlEncode(String value) {
        try {
            return URLEncoder.encode(value, CHARSET_UTF_8);
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e);
        }
    }
}
@ -6,11 +6,16 @@ import org.schabi.newpipe.DownloaderTestImpl;
import org.schabi.newpipe.extractor.NewPipe;
import org.schabi.newpipe.extractor.channel.ChannelExtractor;
import org.schabi.newpipe.extractor.exceptions.ContentNotAvailableException;
import org.schabi.newpipe.extractor.exceptions.ContentNotSupportedException;
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
import org.schabi.newpipe.extractor.exceptions.ParsingException;
import org.schabi.newpipe.extractor.services.BaseChannelExtractorTest;
import org.schabi.newpipe.extractor.services.youtube.extractors.YoutubeChannelExtractor;

import java.io.IOException;

import static org.junit.Assert.*;
import static org.schabi.newpipe.extractor.ExtractorAsserts.assertEmpty;
import static org.schabi.newpipe.extractor.ExtractorAsserts.assertIsSecureUrl;
import static org.schabi.newpipe.extractor.ServiceList.YouTube;
import static org.schabi.newpipe.extractor.services.DefaultTests.*;
@ -41,6 +46,20 @@ public class YoutubeChannelExtractorTest {
        }
    }

    public static class NotSupported {
        @BeforeClass
        public static void setUp() {
            NewPipe.init(DownloaderTestImpl.getInstance());
        }

        @Test(expected = ContentNotSupportedException.class)
        public void noVideoTab() throws Exception {
            final ChannelExtractor extractor = YouTube.getChannelExtractor("https://invidio.us/channel/UC-9-kyTW8ZkZNDHQJ6FgpwQ");
            extractor.fetchPage();
            extractor.getInitialPage();
        }
    }

    public static class Gronkh implements BaseChannelExtractorTest {
        private static YoutubeChannelExtractor extractor;

@ -505,6 +524,97 @@ public class YoutubeChannelExtractorTest {
        }
    }

    /**
     * Some VEVO channels will redirect to a new page with a new channel id.
     * <p>
     * Though, it isn't a simple redirect, but a redirect instruction embed in the response itself, this
     * test assure that we account for that.
     */
    public static class RedirectedChannel implements BaseChannelExtractorTest {
        private static YoutubeChannelExtractor extractor;

        @BeforeClass
        public static void setUp() throws Exception {
            NewPipe.init(DownloaderTestImpl.getInstance());
            extractor = (YoutubeChannelExtractor) YouTube
                    .getChannelExtractor("https://www.youtube.com/channel/UCITk7Ky4iE5_xISw9IaHqpQ");
            extractor.fetchPage();
        }

        /*//////////////////////////////////////////////////////////////////////////
        // Extractor
        //////////////////////////////////////////////////////////////////////////*/

        @Test
        public void testServiceId() {
            assertEquals(YouTube.getServiceId(), extractor.getServiceId());
        }

        @Test
        public void testName() throws Exception {
            assertEquals("LordiVEVO", extractor.getName());
        }

        @Test
        public void testId() throws Exception {
            assertEquals("UCrxkwepj7-4Wz1wHyfzw-sQ", extractor.getId());
        }

        @Test
        public void testUrl() throws ParsingException {
            assertEquals("https://www.youtube.com/channel/UCrxkwepj7-4Wz1wHyfzw-sQ", extractor.getUrl());
        }

        @Test
        public void testOriginalUrl() throws ParsingException {
            assertEquals("https://www.youtube.com/channel/UCITk7Ky4iE5_xISw9IaHqpQ", extractor.getOriginalUrl());
        }

        /*//////////////////////////////////////////////////////////////////////////
        // ListExtractor
        //////////////////////////////////////////////////////////////////////////*/

        @Test
        public void testRelatedItems() throws Exception {
            defaultTestRelatedItems(extractor);
        }

        @Test
        public void testMoreRelatedItems() throws Exception {
            assertNoMoreItems(extractor);
        }

        /*//////////////////////////////////////////////////////////////////////////
        // ChannelExtractor
        //////////////////////////////////////////////////////////////////////////*/

        @Test
        public void testDescription() throws Exception {
            assertEmpty(extractor.getDescription());
        }

        @Test
        public void testAvatarUrl() throws Exception {
            String avatarUrl = extractor.getAvatarUrl();
            assertIsSecureUrl(avatarUrl);
            assertTrue(avatarUrl, avatarUrl.contains("yt3"));
        }

        @Test
        public void testBannerUrl() throws Exception {
            assertEmpty(extractor.getBannerUrl());
        }

        @Test
        public void testFeedUrl() throws Exception {
            assertEquals("https://www.youtube.com/feeds/videos.xml?channel_id=UCrxkwepj7-4Wz1wHyfzw-sQ", extractor.getFeedUrl());
        }

        @Test
        public void testSubscriberCount() throws Exception {
            assertEquals(-1, extractor.getSubscriberCount());
        }
    }

    public static class RandomChannel implements BaseChannelExtractorTest {
        private static YoutubeChannelExtractor extractor;
@ -22,4 +22,10 @@ public class YoutubeParsingHelperTest {
        assertTrue("Hardcoded client version is not valid anymore",
                YoutubeParsingHelper.isHardcodedClientVersionValid());
    }

    @Test
    public void testAreHardcodedYoutubeMusicKeysValid() throws IOException, ExtractionException {
        assertTrue("Hardcoded YouTube Music keys are not valid anymore",
                YoutubeParsingHelper.areHardcodedYoutubeMusicKeysValid());
    }
}
@ -55,6 +55,8 @@ public class YoutubePlaylistLinkHandlerFactoryTest {
        assertTrue(linkHandler.acceptUrl("www.youtube.com/playlist?list=PLW5y1tjAOzI3orQNF1yGGVL5x-pR2K1dC"));
        assertTrue(linkHandler.acceptUrl("www.youtube.com/playlist?list=PLz8YL4HVC87WJQDzVoY943URKQCsHS9XV"));
        assertTrue(linkHandler.acceptUrl("https://music.youtube.com/playlist?list=OLAK5uy_lEBUW9iTwqf0IlYPxZ8LrzpgqjAHZgZpM"));
        assertTrue(linkHandler.acceptUrl("https://www.youtube.com/playlist?list=RDCLAK5uy_ly6s4irLuZAcjEDwJmqcA_UtSipMyGgbQ")); // YouTube Music playlist
        assertFalse(linkHandler.acceptUrl("https://www.youtube.com/watch?v=2kZVEUGLgy4&list=RDdoEcQv1wlsI&index=2, ")); // YouTube Mix
    }

    @Test
@ -0,0 +1,153 @@
package org.schabi.newpipe.extractor.services.youtube.search;

import org.junit.BeforeClass;
import org.junit.Ignore;
import org.schabi.newpipe.DownloaderTestImpl;
import org.schabi.newpipe.extractor.InfoItem;
import org.schabi.newpipe.extractor.NewPipe;
import org.schabi.newpipe.extractor.StreamingService;
import org.schabi.newpipe.extractor.search.SearchExtractor;
import org.schabi.newpipe.extractor.services.DefaultSearchExtractorTest;
import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeSearchQueryHandlerFactory;

import java.net.URLEncoder;

import javax.annotation.Nullable;

import static java.util.Collections.singletonList;
import static org.schabi.newpipe.extractor.ServiceList.YouTube;

public class YoutubeMusicSearchExtractorTest {
    public static class MusicSongs extends DefaultSearchExtractorTest {
        private static SearchExtractor extractor;
        private static final String QUERY = "mocromaniac";

        @BeforeClass
        public static void setUp() throws Exception {
            NewPipe.init(DownloaderTestImpl.getInstance());
            extractor = YouTube.getSearchExtractor(QUERY, singletonList(YoutubeSearchQueryHandlerFactory.MUSIC_SONGS), "");
            extractor.fetchPage();
        }

        @Override public SearchExtractor extractor() { return extractor; }
        @Override public StreamingService expectedService() { return YouTube; }
        @Override public String expectedName() { return QUERY; }
        @Override public String expectedId() { return QUERY; }
        @Override public String expectedUrlContains() { return "music.youtube.com/search?q=" + QUERY; }
        @Override public String expectedOriginalUrlContains() { return "music.youtube.com/search?q=" + QUERY; }
        @Override public String expectedSearchString() { return QUERY; }
        @Nullable @Override public String expectedSearchSuggestion() { return null; }
        @Override public InfoItem.InfoType expectedInfoItemType() { return InfoItem.InfoType.STREAM; }
    }

    public static class MusicVideos extends DefaultSearchExtractorTest {
        private static SearchExtractor extractor;
        private static final String QUERY = "fresku";

        @BeforeClass
        public static void setUp() throws Exception {
            NewPipe.init(DownloaderTestImpl.getInstance());
            extractor = YouTube.getSearchExtractor(QUERY, singletonList(YoutubeSearchQueryHandlerFactory.MUSIC_VIDEOS), "");
            extractor.fetchPage();
        }

        @Override public SearchExtractor extractor() { return extractor; }
        @Override public StreamingService expectedService() { return YouTube; }
        @Override public String expectedName() { return QUERY; }
        @Override public String expectedId() { return QUERY; }
        @Override public String expectedUrlContains() { return "music.youtube.com/search?q=" + QUERY; }
        @Override public String expectedOriginalUrlContains() { return "music.youtube.com/search?q=" + QUERY; }
        @Override public String expectedSearchString() { return QUERY; }
        @Nullable @Override public String expectedSearchSuggestion() { return null; }
        @Override public InfoItem.InfoType expectedInfoItemType() { return InfoItem.InfoType.STREAM; }
    }

    public static class MusicAlbums extends DefaultSearchExtractorTest {
        private static SearchExtractor extractor;
        private static final String QUERY = "johnny sellah";

        @BeforeClass
        public static void setUp() throws Exception {
            NewPipe.init(DownloaderTestImpl.getInstance());
            extractor = YouTube.getSearchExtractor(QUERY, singletonList(YoutubeSearchQueryHandlerFactory.MUSIC_ALBUMS), "");
            extractor.fetchPage();
        }

        @Override public SearchExtractor extractor() { return extractor; }
        @Override public StreamingService expectedService() { return YouTube; }
        @Override public String expectedName() { return QUERY; }
        @Override public String expectedId() { return QUERY; }
        @Override public String expectedUrlContains() { return "music.youtube.com/search?q=" + URLEncoder.encode(QUERY); }
        @Override public String expectedOriginalUrlContains() { return "music.youtube.com/search?q=" + URLEncoder.encode(QUERY); }
        @Override public String expectedSearchString() { return QUERY; }
        @Nullable @Override public String expectedSearchSuggestion() { return null; }
        @Override public InfoItem.InfoType expectedInfoItemType() { return InfoItem.InfoType.PLAYLIST; }
    }

    public static class MusicPlaylists extends DefaultSearchExtractorTest {
        private static SearchExtractor extractor;
        private static final String QUERY = "louivos";

        @BeforeClass
        public static void setUp() throws Exception {
            NewPipe.init(DownloaderTestImpl.getInstance());
            extractor = YouTube.getSearchExtractor(QUERY, singletonList(YoutubeSearchQueryHandlerFactory.MUSIC_PLAYLISTS), "");
            extractor.fetchPage();
        }

        @Override public SearchExtractor extractor() { return extractor; }
        @Override public StreamingService expectedService() { return YouTube; }
        @Override public String expectedName() { return QUERY; }
        @Override public String expectedId() { return QUERY; }
        @Override public String expectedUrlContains() { return "music.youtube.com/search?q=" + QUERY; }
        @Override public String expectedOriginalUrlContains() { return "music.youtube.com/search?q=" + QUERY; }
        @Override public String expectedSearchString() { return QUERY; }
        @Nullable @Override public String expectedSearchSuggestion() { return null; }
        @Override public InfoItem.InfoType expectedInfoItemType() { return InfoItem.InfoType.PLAYLIST; }
    }

    @Ignore
    public static class MusicArtists extends DefaultSearchExtractorTest {
        private static SearchExtractor extractor;
        private static final String QUERY = "kevin";

        @BeforeClass
        public static void setUp() throws Exception {
            NewPipe.init(DownloaderTestImpl.getInstance());
            extractor = YouTube.getSearchExtractor(QUERY, singletonList(YoutubeSearchQueryHandlerFactory.MUSIC_ARTISTS), "");
            extractor.fetchPage();
        }

        @Override public SearchExtractor extractor() { return extractor; }
        @Override public StreamingService expectedService() { return YouTube; }
        @Override public String expectedName() { return QUERY; }
        @Override public String expectedId() { return QUERY; }
        @Override public String expectedUrlContains() { return "music.youtube.com/search?q=" + QUERY; }
        @Override public String expectedOriginalUrlContains() { return "music.youtube.com/search?q=" + QUERY; }
        @Override public String expectedSearchString() { return QUERY; }
        @Nullable @Override public String expectedSearchSuggestion() { return null; }
        @Override public InfoItem.InfoType expectedInfoItemType() { return InfoItem.InfoType.CHANNEL; }
    }

    public static class Suggestion extends DefaultSearchExtractorTest {
        private static SearchExtractor extractor;
        private static final String QUERY = "megaman x3";

        @BeforeClass
        public static void setUp() throws Exception {
            NewPipe.init(DownloaderTestImpl.getInstance());
            extractor = YouTube.getSearchExtractor(QUERY, singletonList(YoutubeSearchQueryHandlerFactory.MUSIC_SONGS), "");
            extractor.fetchPage();
        }

        @Override public SearchExtractor extractor() { return extractor; }
        @Override public StreamingService expectedService() { return YouTube; }
        @Override public String expectedName() { return QUERY; }
        @Override public String expectedId() { return QUERY; }
        @Override public String expectedUrlContains() { return "music.youtube.com/search?q=" + URLEncoder.encode(QUERY); }
        @Override public String expectedOriginalUrlContains() { return "music.youtube.com/search?q=" + URLEncoder.encode(QUERY); }
        @Override public String expectedSearchString() { return QUERY; }
        @Nullable @Override public String expectedSearchSuggestion() { return "mega man x3"; }
        @Override public InfoItem.InfoType expectedInfoItemType() { return InfoItem.InfoType.STREAM; }
    }
}
@ -1,50 +0,0 @@
package org.schabi.newpipe.extractor.services.youtube.search;

import org.junit.Test;
import org.schabi.newpipe.extractor.InfoItem;
import org.schabi.newpipe.extractor.ListExtractor;
import org.schabi.newpipe.extractor.services.youtube.extractors.YoutubeSearchExtractor;

import static org.junit.Assert.assertTrue;


/*
 * Created by Christian Schabesberger on 27.05.18
 *
 * Copyright (C) Christian Schabesberger 2018 <chris.schabesberger@mailbox.org>
 * YoutubeSearchExtractorBaseTest.java is part of NewPipe.
 *
 * NewPipe is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * NewPipe is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with NewPipe. If not, see <http://www.gnu.org/licenses/>.
 */

/**
 * Test for {@link YoutubeSearchExtractor}
 */
public abstract class YoutubeSearchExtractorBaseTest {

    protected static YoutubeSearchExtractor extractor;
    protected static ListExtractor.InfoItemsPage<InfoItem> itemsPage;


    @Test
    public void testResultListElementsLength() {
        assertTrue(Integer.toString(itemsPage.getItems().size()),
                itemsPage.getItems().size() > 10);
    }

    @Test
    public void testUrl() throws Exception {
        assertTrue(extractor.getUrl(), extractor.getUrl().startsWith("https://www.youtube.com"));
    }
}
@ -1,115 +0,0 @@
package org.schabi.newpipe.extractor.services.youtube.search;

import org.hamcrest.CoreMatchers;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.schabi.newpipe.DownloaderTestImpl;
import org.schabi.newpipe.extractor.InfoItem;
import org.schabi.newpipe.extractor.ListExtractor;
import org.schabi.newpipe.extractor.NewPipe;
import org.schabi.newpipe.extractor.channel.ChannelInfoItem;
import org.schabi.newpipe.extractor.services.youtube.extractors.YoutubeSearchExtractor;
import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeSearchQueryHandlerFactory;

import java.net.URL;
import java.net.URLDecoder;
import java.util.LinkedHashMap;
import java.util.Map;

import static java.util.Arrays.asList;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.schabi.newpipe.extractor.ServiceList.YouTube;

public class YoutubeSearchExtractorChannelOnlyTest extends YoutubeSearchExtractorBaseTest {

    @BeforeClass
    public static void setUpClass() throws Exception {
        NewPipe.init(DownloaderTestImpl.getInstance());
        extractor = (YoutubeSearchExtractor) YouTube.getSearchExtractor("pewdiepie",
                asList(YoutubeSearchQueryHandlerFactory.CHANNELS), null);
        extractor.fetchPage();
        itemsPage = extractor.getInitialPage();
    }

    @Test
    public void testGetSecondPage() throws Exception {
        YoutubeSearchExtractor secondExtractor = (YoutubeSearchExtractor) YouTube.getSearchExtractor("pewdiepie",
                asList(YoutubeSearchQueryHandlerFactory.CHANNELS), null);
        ListExtractor.InfoItemsPage<InfoItem> secondPage = secondExtractor.getPage(itemsPage.getNextPageUrl());
        assertTrue(Integer.toString(secondPage.getItems().size()),
                secondPage.getItems().size() > 10);

        // check if its the same result
        boolean equals = true;
        for (int i = 0; i < secondPage.getItems().size()
                && i < itemsPage.getItems().size(); i++) {
            if (!secondPage.getItems().get(i).getUrl().equals(
                    itemsPage.getItems().get(i).getUrl())) {
                equals = false;
            }
        }
        assertFalse("First and second page are equal", equals);
    }

    @Test
    public void testGetSecondPageUrl() throws Exception {
        URL url = new URL(extractor.getNextPageUrl());

        assertEquals(url.getHost(), "www.youtube.com");
        assertEquals(url.getPath(), "/results");

        Map<String, String> queryPairs = new LinkedHashMap<>();
        for (String queryPair : url.getQuery().split("&")) {
            int index = queryPair.indexOf("=");
            queryPairs.put(URLDecoder.decode(queryPair.substring(0, index), "UTF-8"),
                    URLDecoder.decode(queryPair.substring(index + 1), "UTF-8"));
        }

        assertEquals("pewdiepie", queryPairs.get("search_query"));
        assertEquals(queryPairs.get("ctoken"), queryPairs.get("continuation"));
        assertTrue(queryPairs.get("continuation").length() > 5);
        assertTrue(queryPairs.get("itct").length() > 5);
    }

    @Ignore
    @Test
    public void testOnlyContainChannels() {
        for (InfoItem item : itemsPage.getItems()) {
            if (!(item instanceof ChannelInfoItem)) {
                fail("The following item is no channel item: " + item.toString());
            }
        }
    }

    @Test
    public void testChannelUrl() {
        for (InfoItem item : itemsPage.getItems()) {
            if (item instanceof ChannelInfoItem) {
                ChannelInfoItem channel = (ChannelInfoItem) item;

                if (channel.getSubscriberCount() > 1e8) { // the real PewDiePie
                    assertEquals("https://www.youtube.com/channel/UC-lHJZR3Gqxm24_Vd_AJ5Yw", item.getUrl());
                    break;
                }
            }
        }

        for (InfoItem item : itemsPage.getItems()) {
            if (item instanceof ChannelInfoItem) {
                assertThat(item.getUrl(), CoreMatchers.startsWith("https://www.youtube.com/channel/"));
            }
        }
    }

    @Test
    public void testStreamCount() {
        ChannelInfoItem ci = (ChannelInfoItem) itemsPage.getItems().get(0);
        assertTrue("Stream count does not fit: " + ci.getStreamCount(),
                4000 < ci.getStreamCount() && ci.getStreamCount() < 5500);
    }
}
@ -1,142 +0,0 @@
package org.schabi.newpipe.extractor.services.youtube.search;

import org.junit.BeforeClass;
import org.junit.Test;
import org.schabi.newpipe.DownloaderTestImpl;
import org.schabi.newpipe.extractor.InfoItem;
import org.schabi.newpipe.extractor.ListExtractor;
import org.schabi.newpipe.extractor.NewPipe;
import org.schabi.newpipe.extractor.channel.ChannelInfoItem;
import org.schabi.newpipe.extractor.services.youtube.extractors.YoutubeSearchExtractor;
import org.schabi.newpipe.extractor.stream.StreamInfoItem;

import java.net.URL;
import java.net.URLDecoder;
import java.util.LinkedHashMap;
import java.util.Map;

import static org.junit.Assert.*;
import static org.schabi.newpipe.extractor.ServiceList.YouTube;

/*
 * Created by Christian Schabesberger on 27.05.18
 *
 * Copyright (C) Christian Schabesberger 2018 <chris.schabesberger@mailbox.org>
 * YoutubeSearchExtractorStreamTest.java is part of NewPipe.
 *
 * NewPipe is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * NewPipe is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with NewPipe. If not, see <http://www.gnu.org/licenses/>.
 */

/**
 * Test for {@link YoutubeSearchExtractor}
 */
public class YoutubeSearchExtractorDefaultTest extends YoutubeSearchExtractorBaseTest {

    @BeforeClass
    public static void setUpClass() throws Exception {
        NewPipe.init(DownloaderTestImpl.getInstance());
        extractor = (YoutubeSearchExtractor) YouTube.getSearchExtractor("pewdiepie");
        extractor.fetchPage();
        itemsPage = extractor.getInitialPage();
    }

    @Test
    public void testGetUrl() throws Exception {
        assertEquals("https://www.youtube.com/results?search_query=pewdiepie&gl=GB", extractor.getUrl());
    }


    @Test
    public void testGetSecondPageUrl() throws Exception {
        URL url = new URL(extractor.getNextPageUrl());

        assertEquals(url.getHost(), "www.youtube.com");
        assertEquals(url.getPath(), "/results");

        Map<String, String> queryPairs = new LinkedHashMap<>();
        for (String queryPair : url.getQuery().split("&")) {
            int index = queryPair.indexOf("=");
            queryPairs.put(URLDecoder.decode(queryPair.substring(0, index), "UTF-8"),
                    URLDecoder.decode(queryPair.substring(index + 1), "UTF-8"));
        }

        assertEquals("pewdiepie", queryPairs.get("search_query"));
        assertEquals(queryPairs.get("ctoken"), queryPairs.get("continuation"));
        assertTrue(queryPairs.get("continuation").length() > 5);
        assertTrue(queryPairs.get("itct").length() > 5);
    }

    @Test
    public void testResultList_FirstElement() {
        InfoItem firstInfoItem = itemsPage.getItems().get(0);
        InfoItem secondInfoItem = itemsPage.getItems().get(1);

        InfoItem channelItem = firstInfoItem instanceof ChannelInfoItem ? firstInfoItem
                : secondInfoItem;

        // The channel should be the first item
        assertTrue((firstInfoItem instanceof ChannelInfoItem)
                || (secondInfoItem instanceof ChannelInfoItem));
        assertEquals("name", "PewDiePie", channelItem.getName());
        assertEquals("url", "https://www.youtube.com/channel/UC-lHJZR3Gqxm24_Vd_AJ5Yw", channelItem.getUrl());
    }

    @Test
    public void testResultListCheckIfContainsStreamItems() {
        boolean hasStreams = false;
        for (InfoItem item : itemsPage.getItems()) {
            if (item instanceof StreamInfoItem) {
                hasStreams = true;
            }
        }
        assertTrue("Has no InfoItemStreams", hasStreams);
    }

    @Test
    public void testGetSecondPage() throws Exception {
        YoutubeSearchExtractor secondExtractor =
                (YoutubeSearchExtractor) YouTube.getSearchExtractor("pewdiepie");
        ListExtractor.InfoItemsPage<InfoItem> secondPage = secondExtractor.getPage(itemsPage.getNextPageUrl());
        assertTrue(Integer.toString(secondPage.getItems().size()),
                secondPage.getItems().size() > 10);

        // check if its the same result
        boolean equals = true;
        for (int i = 0; i < secondPage.getItems().size()
                && i < itemsPage.getItems().size(); i++) {
            if (!secondPage.getItems().get(i).getUrl().equals(
                    itemsPage.getItems().get(i).getUrl())) {
                equals = false;
            }
        }
        assertFalse("First and second page are equal", equals);
    }

    @Test
    public void testSuggestionNotNull() throws Exception {
        //todo write a real test
        assertNotNull(extractor.getSearchSuggestion());
    }


    @Test
    public void testId() throws Exception {
        assertEquals("pewdiepie", extractor.getId());
    }

    @Test
    public void testName() {
        assertEquals("pewdiepie", extractor.getName());
    }
}
@ -0,0 +1,190 @@
package org.schabi.newpipe.extractor.services.youtube.search;

import org.junit.BeforeClass;
import org.junit.Test;
import org.schabi.newpipe.DownloaderTestImpl;
import org.schabi.newpipe.extractor.InfoItem;
import org.schabi.newpipe.extractor.ListExtractor;
import org.schabi.newpipe.extractor.NewPipe;
import org.schabi.newpipe.extractor.StreamingService;
import org.schabi.newpipe.extractor.search.SearchExtractor;
import org.schabi.newpipe.extractor.services.DefaultSearchExtractorTest;

import javax.annotation.Nullable;

import static java.util.Collections.singletonList;
import static junit.framework.TestCase.assertFalse;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.schabi.newpipe.extractor.ExtractorAsserts.assertEmptyErrors;
import static org.schabi.newpipe.extractor.ServiceList.YouTube;
import static org.schabi.newpipe.extractor.services.DefaultTests.assertNoDuplicatedItems;
import static org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeSearchQueryHandlerFactory.*;

public class YoutubeSearchExtractorTest {
    public static class All extends DefaultSearchExtractorTest {
        private static SearchExtractor extractor;
        private static final String QUERY = "test";

        @BeforeClass
        public static void setUp() throws Exception {
            NewPipe.init(DownloaderTestImpl.getInstance());
            extractor = YouTube.getSearchExtractor(QUERY);
            extractor.fetchPage();
        }

        @Override public SearchExtractor extractor() { return extractor; }
        @Override public StreamingService expectedService() { return YouTube; }
        @Override public String expectedName() { return QUERY; }
        @Override public String expectedId() { return QUERY; }
        @Override public String expectedUrlContains() { return "youtube.com/results?search_query=" + QUERY; }
        @Override public String expectedOriginalUrlContains() { return "youtube.com/results?search_query=" + QUERY; }
        @Override public String expectedSearchString() { return QUERY; }
        @Nullable @Override public String expectedSearchSuggestion() { return null; }
    }

    public static class Channel extends DefaultSearchExtractorTest {
        private static SearchExtractor extractor;
        private static final String QUERY = "test";

        @BeforeClass
        public static void setUp() throws Exception {
            NewPipe.init(DownloaderTestImpl.getInstance());
            extractor = YouTube.getSearchExtractor(QUERY, singletonList(CHANNELS), "");
            extractor.fetchPage();
        }

        @Override public SearchExtractor extractor() { return extractor; }
        @Override public StreamingService expectedService() { return YouTube; }
        @Override public String expectedName() { return QUERY; }
        @Override public String expectedId() { return QUERY; }
        @Override public String expectedUrlContains() { return "youtube.com/results?search_query=" + QUERY; }
        @Override public String expectedOriginalUrlContains() { return "youtube.com/results?search_query=" + QUERY; }
        @Override public String expectedSearchString() { return QUERY; }
        @Nullable @Override public String expectedSearchSuggestion() { return null; }

        @Override public InfoItem.InfoType expectedInfoItemType() { return InfoItem.InfoType.CHANNEL; }
    }

    public static class Playlists extends DefaultSearchExtractorTest {
        private static SearchExtractor extractor;
        private static final String QUERY = "test";

        @BeforeClass
        public static void setUp() throws Exception {
            NewPipe.init(DownloaderTestImpl.getInstance());
            extractor = YouTube.getSearchExtractor(QUERY, singletonList(PLAYLISTS), "");
            extractor.fetchPage();
        }

        @Override public SearchExtractor extractor() { return extractor; }
        @Override public StreamingService expectedService() { return YouTube; }
        @Override public String expectedName() { return QUERY; }
        @Override public String expectedId() { return QUERY; }
        @Override public String expectedUrlContains() { return "youtube.com/results?search_query=" + QUERY; }
        @Override public String expectedOriginalUrlContains() { return "youtube.com/results?search_query=" + QUERY; }
        @Override public String expectedSearchString() { return QUERY; }
        @Nullable @Override public String expectedSearchSuggestion() { return null; }

        @Override public InfoItem.InfoType expectedInfoItemType() { return InfoItem.InfoType.PLAYLIST; }
    }

    public static class Videos extends DefaultSearchExtractorTest {
        private static SearchExtractor extractor;
        private static final String QUERY = "test";

        @BeforeClass
        public static void setUp() throws Exception {
            NewPipe.init(DownloaderTestImpl.getInstance());
            extractor = YouTube.getSearchExtractor(QUERY, singletonList(VIDEOS), "");
            extractor.fetchPage();
        }

        @Override public SearchExtractor extractor() { return extractor; }
        @Override public StreamingService expectedService() { return YouTube; }
        @Override public String expectedName() { return QUERY; }
        @Override public String expectedId() { return QUERY; }
        @Override public String expectedUrlContains() { return "youtube.com/results?search_query=" + QUERY; }
        @Override public String expectedOriginalUrlContains() { return "youtube.com/results?search_query=" + QUERY; }
        @Override public String expectedSearchString() { return QUERY; }
        @Nullable @Override public String expectedSearchSuggestion() { return null; }

        @Override public InfoItem.InfoType expectedInfoItemType() { return InfoItem.InfoType.STREAM; }
    }

    public static class Suggestion extends DefaultSearchExtractorTest {
        private static SearchExtractor extractor;
        private static final String QUERY = "pewdeipie";
        private static final String EXPECTED_SUGGESTION = "pewdiepie";

        @BeforeClass
        public static void setUp() throws Exception {
            NewPipe.init(DownloaderTestImpl.getInstance());
            extractor = YouTube.getSearchExtractor(QUERY, singletonList(VIDEOS), "");
            extractor.fetchPage();
        }

        @Override public SearchExtractor extractor() { return extractor; }
        @Override public StreamingService expectedService() { return YouTube; }
        @Override public String expectedName() { return QUERY; }
        @Override public String expectedId() { return QUERY; }
        @Override public String expectedUrlContains() { return "youtube.com/results?search_query=" + QUERY; }
        @Override public String expectedOriginalUrlContains() { return "youtube.com/results?search_query=" + QUERY; }
        @Override public String expectedSearchString() { return QUERY; }
        @Nullable @Override public String expectedSearchSuggestion() { return EXPECTED_SUGGESTION; }

        @Override public InfoItem.InfoType expectedInfoItemType() { return InfoItem.InfoType.STREAM; }
    }

    public static class RandomQueryNoMorePages extends DefaultSearchExtractorTest {
        private static SearchExtractor extractor;
        private static final String QUERY = "UCO6AK";

        @BeforeClass
        public static void setUp() throws Exception {
            NewPipe.init(DownloaderTestImpl.getInstance());
            extractor = YouTube.getSearchExtractor(QUERY);
            extractor.fetchPage();
        }

        @Override public SearchExtractor extractor() { return extractor; }
        @Override public StreamingService expectedService() { return YouTube; }
        @Override public String expectedName() { return QUERY; }
        @Override public String expectedId() { return QUERY; }
        @Override public String expectedUrlContains() { return "youtube.com/results?search_query=" + QUERY; }
        @Override public String expectedOriginalUrlContains() { return "youtube.com/results?search_query=" + QUERY; }
        @Override public String expectedSearchString() { return QUERY; }
        @Nullable @Override public String expectedSearchSuggestion() { return null; }

        /*//////////////////////////////////////////////////////////////////////////
        // Test Overrides
        //////////////////////////////////////////////////////////////////////////*/

        @Test
        public void testMoreRelatedItems() throws Exception {
            // YouTube actually gives us an empty next page, but after that, no more pages.
            assertTrue(extractor.hasNextPage());
            final ListExtractor.InfoItemsPage<InfoItem> nextEmptyPage = extractor.getPage(extractor.getNextPageUrl());
            assertEquals(0, nextEmptyPage.getItems().size());
            assertEmptyErrors("Empty page has errors", nextEmptyPage.getErrors());

            assertFalse("More items available when it shouldn't", nextEmptyPage.hasNextPage());
            final String nextPageUrl = nextEmptyPage.getNextPageUrl();
            assertTrue("Next page is not empty or null", nextPageUrl == null || nextPageUrl.isEmpty());
        }
    }

    public static class PagingTest {
        @Test
        public void duplicatedItemsCheck() throws Exception {
            NewPipe.init(DownloaderTestImpl.getInstance());
            final SearchExtractor extractor = YouTube.getSearchExtractor("cirque du soleil", singletonList(VIDEOS), "");
            extractor.fetchPage();

            final ListExtractor.InfoItemsPage<InfoItem> page1 = extractor.getInitialPage();
            final ListExtractor.InfoItemsPage<InfoItem> page2 = extractor.getPage(page1.getNextPageUrl());

            assertNoDuplicatedItems(YouTube, page1, page2);
        }
    }
}
@ -1,69 +0,0 @@
package org.schabi.newpipe.extractor.services.youtube.search;

import org.junit.BeforeClass;
import org.junit.Test;
import org.schabi.newpipe.DownloaderTestImpl;
import org.schabi.newpipe.extractor.InfoItem;
import org.schabi.newpipe.extractor.ListExtractor;
import org.schabi.newpipe.extractor.NewPipe;
import org.schabi.newpipe.extractor.services.youtube.extractors.YoutubeSearchExtractor;
import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeSearchQueryHandlerFactory;

import java.util.HashSet;
import java.util.List;
import java.util.Set;

import static java.util.Collections.singletonList;
import static org.junit.Assert.*;
import static org.schabi.newpipe.extractor.ServiceList.YouTube;

public class YoutubeSearchPagingTest {
    private static ListExtractor.InfoItemsPage<InfoItem> page1;
    private static ListExtractor.InfoItemsPage<InfoItem> page2;
    private static Set<String> urlList1;
    private static Set<String> urlList2;
    private static int page1Size;
    private static int page2Size;

    @BeforeClass
    public static void setUpClass() throws Exception {
        NewPipe.init(DownloaderTestImpl.getInstance());

        YoutubeSearchExtractor extractor = (YoutubeSearchExtractor) YouTube.getSearchExtractor("cirque du soleil",
                singletonList(YoutubeSearchQueryHandlerFactory.VIDEOS), null);

        extractor.fetchPage();
        page1 = extractor.getInitialPage();
        urlList1 = extractUrls(page1.getItems());
        assertTrue("failed to load search result page one: too few items", 15 < page1.getItems().size());
        page1Size = page1.getItems().size();
        assertEquals("duplicated items in search result on page one", page1Size, urlList1.size());

        assertTrue("search result has no second page", page1.hasNextPage());
        assertNotNull("next page url is null", page1.getNextPageUrl());
        page2 = extractor.getPage(page1.getNextPageUrl());
        urlList2 = extractUrls(page2.getItems());
        page2Size = page2.getItems().size();
    }

    private static Set<String> extractUrls(List<InfoItem> list) {
        Set<String> result = new HashSet<>();
        for (InfoItem item : list) {
            result.add(item.getUrl());
        }
        return result;
    }

    @Test
    public void secondPageUniqueVideos() {
        assertEquals("Second search result page has duplicated items", page2Size, urlList2.size());
    }

    @Test
    public void noRepeatingVideosInPages() {
        Set<String> intersection = new HashSet<>(urlList2);
        intersection.retainAll(urlList1);
        assertEquals("Found the same item on first AND second search page", 0, intersection.size());
    }

}
@ -16,6 +16,12 @@ public class YoutubeSearchQHTest {
        assertEquals("https://www.youtube.com/results?search_query=Poifj%26jaijf", YouTube.getSearchQHFactory().fromQuery("Poifj&jaijf").getUrl());
        assertEquals("https://www.youtube.com/results?search_query=G%C3%BCl%C3%BCm", YouTube.getSearchQHFactory().fromQuery("Gülüm").getUrl());
        assertEquals("https://www.youtube.com/results?search_query=%3Fj%24%29H%C2%A7B", YouTube.getSearchQHFactory().fromQuery("?j$)H§B").getUrl());

        assertEquals("https://music.youtube.com/search?q=asdf", YouTube.getSearchQHFactory().fromQuery("asdf", asList(new String[]{MUSIC_SONGS}), "").getUrl());
        assertEquals("https://music.youtube.com/search?q=hans", YouTube.getSearchQHFactory().fromQuery("hans", asList(new String[]{MUSIC_SONGS}), "").getUrl());
        assertEquals("https://music.youtube.com/search?q=Poifj%26jaijf", YouTube.getSearchQHFactory().fromQuery("Poifj&jaijf", asList(new String[]{MUSIC_SONGS}), "").getUrl());
        assertEquals("https://music.youtube.com/search?q=G%C3%BCl%C3%BCm", YouTube.getSearchQHFactory().fromQuery("Gülüm", asList(new String[]{MUSIC_SONGS}), "").getUrl());
        assertEquals("https://music.youtube.com/search?q=%3Fj%24%29H%C2%A7B", YouTube.getSearchQHFactory().fromQuery("?j$)H§B", asList(new String[]{MUSIC_SONGS}), "").getUrl());
    }

    @Test
@ -24,6 +30,9 @@ public class YoutubeSearchQHTest {
                .fromQuery("", asList(new String[]{VIDEOS}), "").getContentFilters().get(0));
        assertEquals(CHANNELS, YouTube.getSearchQHFactory()
                .fromQuery("asdf", asList(new String[]{CHANNELS}), "").getContentFilters().get(0));

        assertEquals(MUSIC_SONGS, YouTube.getSearchQHFactory()
                .fromQuery("asdf", asList(new String[]{MUSIC_SONGS}), "").getContentFilters().get(0));
    }

    @Test
@ -36,16 +45,23 @@ public class YoutubeSearchQHTest {
                .fromQuery("asdf", asList(new String[]{PLAYLISTS}), "").getUrl());
        assertEquals("https://www.youtube.com/results?search_query=asdf", YouTube.getSearchQHFactory()
                .fromQuery("asdf", asList(new String[]{"fjiijie"}), "").getUrl());

        assertEquals("https://music.youtube.com/search?q=asdf", YouTube.getSearchQHFactory()
                .fromQuery("asdf", asList(new String[]{MUSIC_SONGS}), "").getUrl());
    }

    @Test
    public void testGetAvailableContentFilter() {
        final String[] contentFilter = YouTube.getSearchQHFactory().getAvailableContentFilter();
        assertEquals(4, contentFilter.length);
        assertEquals(8, contentFilter.length);
        assertEquals("all", contentFilter[0]);
        assertEquals("videos", contentFilter[1]);
        assertEquals("channels", contentFilter[2]);
        assertEquals("playlists", contentFilter[3]);
        assertEquals("music_songs", contentFilter[4]);
        assertEquals("music_videos", contentFilter[5]);
        assertEquals("music_albums", contentFilter[6]);
        assertEquals("music_playlists", contentFilter[7]);
    }

    @Test