Merge pull request #788 from Stypox/mix
Add MixInfoItem and extract YouTube mixes in related items
XiangRongLin authored Mar 19, 2022
2 parents d8f2031 + 09ddb6a commit 7f2ea13
Showing 58 changed files with 4,343 additions and 1,651 deletions.
@@ -1,8 +1,5 @@
-package org.schabi.newpipe.extractor.search;
+package org.schabi.newpipe.extractor;
 
-import org.schabi.newpipe.extractor.InfoItem;
-import org.schabi.newpipe.extractor.InfoItemExtractor;
-import org.schabi.newpipe.extractor.InfoItemsCollector;
 import org.schabi.newpipe.extractor.channel.ChannelInfoItemExtractor;
 import org.schabi.newpipe.extractor.channel.ChannelInfoItemsCollector;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
@@ -36,23 +33,24 @@
  */
 
 /**
- * Collector for search results
+ * A collector that can handle many extractor types, to be used when a list contains items of
+ * different types (e.g. search)
  * <p>
  * This collector can handle the following extractor types:
  * <ul>
  * <li>{@link StreamInfoItemExtractor}</li>
  * <li>{@link ChannelInfoItemExtractor}</li>
  * <li>{@link PlaylistInfoItemExtractor}</li>
  * </ul>
- * Calling {@link #extract(InfoItemExtractor)} or {@link #commit(Object)} with any
+ * Calling {@link #extract(InfoItemExtractor)} or {@link #commit(InfoItemExtractor)} with any
 * other extractor type will raise an exception.
 */
-public class InfoItemsSearchCollector extends InfoItemsCollector<InfoItem, InfoItemExtractor> {
+public class MultiInfoItemsCollector extends InfoItemsCollector<InfoItem, InfoItemExtractor> {
     private final StreamInfoItemsCollector streamCollector;
     private final ChannelInfoItemsCollector userCollector;
     private final PlaylistInfoItemsCollector playlistCollector;
 
-    public InfoItemsSearchCollector(int serviceId) {
+    public MultiInfoItemsCollector(int serviceId) {
         super(serviceId);
         streamCollector = new StreamInfoItemsCollector(serviceId);
         userCollector = new ChannelInfoItemsCollector(serviceId);
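The dispatch that the Javadoc above describes is not part of this hunk. A minimal sketch of how such a collector can route each extractor to the matching sub-collector (illustrative only, not the exact code from this commit):

    @Override
    public InfoItem extract(final InfoItemExtractor extractor) throws ParsingException {
        // Delegate to whichever sub-collector understands this extractor type.
        if (extractor instanceof StreamInfoItemExtractor) {
            return streamCollector.extract((StreamInfoItemExtractor) extractor);
        } else if (extractor instanceof ChannelInfoItemExtractor) {
            return userCollector.extract((ChannelInfoItemExtractor) extractor);
        } else if (extractor instanceof PlaylistInfoItemExtractor) {
            return playlistCollector.extract((PlaylistInfoItemExtractor) extractor);
        } else {
            // Any other extractor type raises an exception, as documented above.
            throw new IllegalArgumentException("Invalid extractor type: " + extractor);
        }
    }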
@@ -49,4 +49,8 @@ public String getSubChannelUrl() throws ParsingException {
     public String getSubChannelAvatarUrl() throws ParsingException {
         return EMPTY_STRING;
     }
+
+    public PlaylistInfo.PlaylistType getPlaylistType() throws ParsingException {
+        return PlaylistInfo.PlaylistType.NORMAL;
+    }
 }
@@ -17,6 +17,41 @@
 
 public class PlaylistInfo extends ListInfo<StreamInfoItem> {
 
+    /**
+     * Mixes are handled as particular playlists in NewPipeExtractor. {@link PlaylistType#NORMAL} is
+     * for non-mixes, while other values are for the different types of mixes. The type of a mix
+     * depends on how its contents are autogenerated.
+     */
+    public enum PlaylistType {
+        /**
+         * A normal playlist (not a mix)
+         */
+        NORMAL,
+
+        /**
+         * A mix made only of streams related to a particular stream, for example YouTube mixes
+         */
+        MIX_STREAM,
+
+        /**
+         * A mix made only of music streams related to a particular stream, for example YouTube
+         * music mixes
+         */
+        MIX_MUSIC,
+
+        /**
+         * A mix made only of streams from (or related to) the same channel, for example YouTube
+         * channel mixes
+         */
+        MIX_CHANNEL,
+
+        /**
+         * A mix made only of streams related to a particular (musical) genre, for example YouTube
+         * genre mixes
+         */
+        MIX_GENRE,
+    }
+
     private PlaylistInfo(int serviceId, ListLinkHandler linkHandler, String name) throws ParsingException {
         super(serviceId, linkHandler, name);
     }
@@ -105,6 +140,11 @@ public static PlaylistInfo getInfo(PlaylistExtractor extractor) throws Extractio
         } catch (Exception e) {
             info.addError(e);
         }
+        try {
+            info.setPlaylistType(extractor.getPlaylistType());
+        } catch (Exception e) {
+            info.addError(e);
+        }
         // do not fail if everything but the uploader infos could be collected
         if (!uploaderParsingErrors.isEmpty() &&
                 (!info.getErrors().isEmpty() || uploaderParsingErrors.size() < 3)) {
@@ -127,6 +167,7 @@ public static PlaylistInfo getInfo(PlaylistExtractor extractor) throws Extractio
     private String subChannelName;
     private String subChannelAvatarUrl;
     private long streamCount = 0;
+    private PlaylistType playlistType;
 
     public String getThumbnailUrl() {
         return thumbnailUrl;
@@ -199,4 +240,12 @@ public long getStreamCount() {
     public void setStreamCount(long streamCount) {
         this.streamCount = streamCount;
     }
+
+    public PlaylistType getPlaylistType() {
+        return playlistType;
+    }
+
+    public void setPlaylistType(final PlaylistType playlistType) {
+        this.playlistType = playlistType;
+    }
 }
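With the new playlistType field and accessors in place, callers can branch on the kind of playlist they received. A minimal usage sketch, relying only on the getInfo(PlaylistExtractor) overload and the getPlaylistType() accessor shown above:

    final PlaylistInfo info = PlaylistInfo.getInfo(extractor);
    switch (info.getPlaylistType()) {
        case NORMAL:
            // a regular, user-created playlist
            break;
        case MIX_STREAM:
        case MIX_MUSIC:
        case MIX_CHANNEL:
        case MIX_GENRE:
            // an auto-generated mix; its contents may change between fetches
            break;
    }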
@@ -9,6 +9,7 @@ public class PlaylistInfoItem extends InfoItem {
      * How many streams this playlist have
      */
     private long streamCount = 0;
+    private PlaylistInfo.PlaylistType playlistType;
 
     public PlaylistInfoItem(int serviceId, String url, String name) {
         super(InfoType.PLAYLIST, serviceId, url, name);
@@ -29,4 +30,12 @@ public long getStreamCount() {
     public void setStreamCount(long stream_count) {
         this.streamCount = stream_count;
     }
+
+    public PlaylistInfo.PlaylistType getPlaylistType() {
+        return playlistType;
+    }
+
+    public void setPlaylistType(final PlaylistInfo.PlaylistType playlistType) {
+        this.playlistType = playlistType;
+    }
 }
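Because PlaylistInfoItem now carries the same type information, client code can tell mixes apart from ordinary playlists among a stream's related items, which is the point of this pull request. A hypothetical helper, assuming a List<InfoItem> of related items has already been obtained; the method name findMixes is invented for illustration:

    import java.util.ArrayList;
    import java.util.List;

    static List<PlaylistInfoItem> findMixes(final List<InfoItem> relatedItems) {
        final List<PlaylistInfoItem> mixes = new ArrayList<>();
        for (final InfoItem item : relatedItems) {
            if (item instanceof PlaylistInfoItem) {
                final PlaylistInfoItem playlist = (PlaylistInfoItem) item;
                final PlaylistInfo.PlaylistType type = playlist.getPlaylistType();
                // Anything other than NORMAL is one of the mix types.
                if (type != null && type != PlaylistInfo.PlaylistType.NORMAL) {
                    mixes.add(playlist);
                }
            }
        }
        return mixes;
    }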
@@ -3,6 +3,8 @@
 import org.schabi.newpipe.extractor.InfoItemExtractor;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 
+import javax.annotation.Nonnull;
+
 public interface PlaylistInfoItemExtractor extends InfoItemExtractor {
 
     /**
@@ -18,4 +20,13 @@ public interface PlaylistInfoItemExtractor extends InfoItemExtractor {
      * @throws ParsingException
      */
     long getStreamCount() throws ParsingException;
+
+    /**
+     * @return the type of this playlist, see {@link PlaylistInfo.PlaylistType} for a description
+     * of types. If not overridden always returns {@link PlaylistInfo.PlaylistType#NORMAL}.
+     */
+    @Nonnull
+    default PlaylistInfo.PlaylistType getPlaylistType() throws ParsingException {
+        return PlaylistInfo.PlaylistType.NORMAL;
+    }
 }
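Concrete extractors that represent a mix can override this default. A hypothetical fragment (the class name and the constant it returns are invented for illustration; the interface's other members are elided):

    // Hypothetical extractor, for illustration only; not part of this commit.
    public class ExampleMixInfoItemExtractor implements PlaylistInfoItemExtractor {
        // ... getName(), getUrl() and the other inherited members elided ...

        @Nonnull
        @Override
        public PlaylistInfo.PlaylistType getPlaylistType() throws ParsingException {
            // A real extractor would derive this from service-specific data,
            // e.g. the playlist id of a YouTube mix.
            return PlaylistInfo.PlaylistType.MIX_STREAM;
        }
    }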
@@ -33,6 +33,11 @@ public PlaylistInfoItem extract(PlaylistInfoItemExtractor extractor) throws Pars
         } catch (Exception e) {
             addError(e);
         }
+        try {
+            resultItem.setPlaylistType(extractor.getPlaylistType());
+        } catch (Exception e) {
+            addError(e);
+        }
         return resultItem;
     }
 }
@@ -15,7 +15,7 @@
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
-import org.schabi.newpipe.extractor.search.InfoItemsSearchCollector;
+import org.schabi.newpipe.extractor.MultiInfoItemsCollector;
 import org.schabi.newpipe.extractor.search.SearchExtractor;
 import org.schabi.newpipe.extractor.services.bandcamp.extractors.streaminfoitem.BandcampSearchStreamInfoItemExtractor;
 
@@ -50,7 +50,7 @@ public List<MetaInfo> getMetaInfo() throws ParsingException {
     public InfoItemsPage<InfoItem> getPage(final Page page) throws IOException, ExtractionException {
         final String html = getDownloader().get(page.getUrl()).responseBody();
 
-        final InfoItemsSearchCollector collector = new InfoItemsSearchCollector(getServiceId());
+        final MultiInfoItemsCollector collector = new MultiInfoItemsCollector(getServiceId());
 
 
         final Document d = Jsoup.parse(html);
@@ -15,7 +15,7 @@
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
-import org.schabi.newpipe.extractor.search.InfoItemsSearchCollector;
+import org.schabi.newpipe.extractor.MultiInfoItemsCollector;
 import org.schabi.newpipe.extractor.search.SearchExtractor;
 import org.schabi.newpipe.extractor.services.media_ccc.extractors.infoItems.MediaCCCStreamInfoItemExtractor;
 import org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCConferencesListLinkHandlerFactory;
@@ -66,7 +66,7 @@ public List<MetaInfo> getMetaInfo() {
     @Nonnull
     @Override
     public InfoItemsPage<InfoItem> getInitialPage() {
-        final InfoItemsSearchCollector searchItems = new InfoItemsSearchCollector(getServiceId());
+        final MultiInfoItemsCollector searchItems = new MultiInfoItemsCollector(getServiceId());
 
         if (getLinkHandler().getContentFilters().contains(CONFERENCES)
                 || getLinkHandler().getContentFilters().contains(ALL)
@@ -122,7 +122,7 @@ public void onFetchPage(@Nonnull final Downloader downloader)
 
     private void searchConferences(final String searchString,
                                    final List<ChannelInfoItem> channelItems,
-                                   final InfoItemsSearchCollector collector) {
+                                   final MultiInfoItemsCollector collector) {
         for (final ChannelInfoItem item : channelItems) {
             if (item.getName().toUpperCase().contains(
                     searchString.toUpperCase())) {
@@ -12,7 +12,7 @@
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
-import org.schabi.newpipe.extractor.search.InfoItemsSearchCollector;
+import org.schabi.newpipe.extractor.MultiInfoItemsCollector;
 import org.schabi.newpipe.extractor.search.SearchExtractor;
 import org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper;
 import org.schabi.newpipe.extractor.utils.Utils;
@@ -87,7 +87,7 @@ public InfoItemsPage<InfoItem> getPage(final Page page) throws IOException, Extr
         PeertubeParsingHelper.validate(json);
         final long total = json.getLong("total");
 
-        final InfoItemsSearchCollector collector = new InfoItemsSearchCollector(getServiceId());
+        final MultiInfoItemsCollector collector = new MultiInfoItemsCollector(getServiceId());
         collectStreamsFrom(collector, json, getBaseUrl(), sepia);
 
         return new InfoItemsPage<>(collector, PeertubeParsingHelper.getNextPage(page.getUrl(), total));
@@ -9,7 +9,7 @@
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
-import org.schabi.newpipe.extractor.search.InfoItemsSearchCollector;
+import org.schabi.newpipe.extractor.MultiInfoItemsCollector;
 import org.schabi.newpipe.extractor.search.SearchExtractor;
 import org.schabi.newpipe.extractor.utils.Parser;
 
@@ -100,7 +100,7 @@ public void onFetchPage(@Nonnull final Downloader downloader) throws IOException
 
     private InfoItemsCollector<InfoItem, InfoItemExtractor> collectItems(
             final JsonArray searchCollection) {
-        final InfoItemsSearchCollector collector = new InfoItemsSearchCollector(getServiceId());
+        final MultiInfoItemsCollector collector = new MultiInfoItemsCollector(getServiceId());
 
         for (final Object result : searchCollection) {
             if (!(result instanceof JsonObject)) continue;
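Each of the four services above changes in the same way: the search page builds one MultiInfoItemsCollector and commits whatever extractor type a result maps to, letting the collector dispatch internally. A schematic sketch of that pattern with invented names (SomeStreamInfoItemExtractor, SomeChannelInfoItemExtractor, SomePlaylistInfoItemExtractor and the "kind" field are hypothetical, not taken from any of these services):

    final MultiInfoItemsCollector collector = new MultiInfoItemsCollector(getServiceId());

    for (final JsonObject result : searchResults) { // hypothetical list of parsed results
        switch (result.getString("kind", "")) {     // hypothetical discriminator field
            case "video":
                collector.commit(new SomeStreamInfoItemExtractor(result));
                break;
            case "channel":
                collector.commit(new SomeChannelInfoItemExtractor(result));
                break;
            case "playlist":
                collector.commit(new SomePlaylistInfoItemExtractor(result));
                break;
            default:
                break; // unknown kinds are skipped
        }
    }

    return new InfoItemsPage<>(collector, nextPage);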