Commit
Merge pull request #8 from RipMeApp/master
Merge
rephormat authored May 21, 2018
2 parents 7978f7c + d11dfe8 commit 4f9b912
Showing 99 changed files with 2,710 additions and 694 deletions.
2 changes: 0 additions & 2 deletions README.md
@@ -30,8 +30,6 @@ For information about running the `.jar` file, see [the How To Run wiki](https:/

## [Changelog](https://github.com/ripmeapp/ripme/blob/master/ripme.json) (ripme.json)

## [Website](http://rip.rarchives.com/)

# Features

* Quickly downloads all images in an online album (see supported sites below)
2 changes: 1 addition & 1 deletion pom.xml
@@ -4,7 +4,7 @@
<groupId>com.rarchives.ripme</groupId>
<artifactId>ripme</artifactId>
<packaging>jar</packaging>
<version>1.7.27</version>
<version>1.7.47</version>
<name>ripme</name>
<url>http://rip.rarchives.com</url>
<properties>
22 changes: 21 additions & 1 deletion ripme.json
@@ -1,6 +1,26 @@
{
"latestVersion": "1.7.27",
"latestVersion": "1.7.47",
"changeList": [
"1.7.47: Added quick queue support for hentai2read ripper; Fixed instagram ripper; SankakuComplexRipper can now download from different subdomains; Added ripper for bato.to; Added quick queue support for 8muses.download; ",
"1.7.46: Fixed hentai2read ripper; Rewrote the myhentaicomics ripper to use the new getAlbumsToQueue func; Can now blacklist nhentai tags; SinnercomicsRipper no longer adds -page-01 to folder names; EightmusesRipper now adds file extension to filename; disbaled test for twitch ripper",
"1.7.45: Fixed hentai2read ripper; ImageBam album fixed; Added various translations; TsuminoRipper no longer requires album name to download",
"1.7.44: Fixed instagram ripper regex",
"1.7.43: Fixed queryId regex in instagram ripper",
"1.7.42: Added user support to SmuttyRipper; Removed vine ripper; Fixed NudeGalsRipper; addURLToDownload improvments; Fixed Instagram ripper",
"1.7.41: Added support for spyingwithlana.com; Added ManganeloRipper; Added support for dynasty-scans.com",
"1.7.40: Added hypnohub.net ripper; Fixed rule34.xxx ripper; Tsumino Ripper now add .png to filenames",
"1.7.39: Added rule34.xxx ripper; Added Gfycatporntube.com ripper; Fixed AbstractRipper subdir bug; Added AbstractRipper unit tests",
"1.7.38: Added http and socks proxy support; Extended some unit tests to include getGid; Added HitomiRipper; hentaifoundry ripper now can rip all images from accounts",
"1.7.37: MInor code clean up; Added socks proxy support; Added support for 8muses.download; Hentaifoundry no longer errors when there are no more pages; Fix bug that causes tumblr to replace https with httpss when downloading resized images",
"1.7.36: Fixed Instagram ripper; Fixed hentai2read ripper test; Fixed tnbtu.com ripper",
"1.7.35: Fixed instagram ripper; hentai2read ripper now properly names folders",
"1.7.34: Added Blackbrickroadofoz Ripper; Fixed webtoons regex",
"1.7.33: Instagram ripper no longer errors out when downloading from more than 1 page",
"1.7.32: Instagram ripper update to use new enpoints",
"1.7.31: InstaGram ripper no longer errors out when getting next page",
"1.7.30: Fixed usage of command-line on non-headless systems",
"1.7.29: Cano now download single images from imgur; Improved handling of headless mode & OS-specific config; Added modelx ripper; Fixed eroshae ripper",
"1.7.28: IG ripper now uses display_url when downloading images; Reddit ripper now gets erome links; Hentaifoundry Ripper no longer errors out when there is no next page",
"1.7.27: IG ripper can now rip from tags; fixed json parsing issues",
"1.7.26: fixed instagram ripper",
"1.7.25: Fixed instagram ripper; Added an option to use short names for 8muses; Added tsuminoRipper; Added support for incase.buttsmithy.com",
94 changes: 77 additions & 17 deletions src/main/java/com/rarchives/ripme/App.java
@@ -1,5 +1,6 @@
package com.rarchives.ripme;

import java.awt.*;
import java.io.File;
import java.io.IOException;
import java.io.BufferedReader;
@@ -18,55 +19,72 @@
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang.SystemUtils;
import org.apache.log4j.Logger;

import com.rarchives.ripme.ripper.AbstractRipper;
import com.rarchives.ripme.ui.History;
import com.rarchives.ripme.ui.HistoryEntry;
import com.rarchives.ripme.ui.MainWindow;
import com.rarchives.ripme.ui.UpdateUtils;
import com.rarchives.ripme.utils.Proxy;
import com.rarchives.ripme.utils.RipUtils;
import com.rarchives.ripme.utils.Utils;

/**
* Entry point to application.
* This is where all the fun happens, with the main method.
* Decides to display UI or to run silently via command-line.
*
* As the "controller" to all other classes, it parses command line parameters and loads the history.
*/
public class App {

public static final Logger logger;
public static final Logger logger = Logger.getLogger(App.class);
private static final History HISTORY = new History();

static {
//initialize logger
Utils.configureLogger();
logger = Logger.getLogger(App.class);
}

/**
* Where everything starts. Takes in, and tries to parse as many commandline arguments as possible.
* Otherwise, it launches a GUI.
*
* @param args Array of command line arguments.
*/
public static void main(String[] args) throws MalformedURLException {
CommandLine cl = getArgs(args);

if (args.length > 0 && cl.hasOption('v')){
logger.error(UpdateUtils.getThisJarVersion());
logger.info(UpdateUtils.getThisJarVersion());
System.exit(0);
}

System.setProperty("apple.laf.useScreenMenuBar", "true");
System.setProperty("com.apple.mrj.application.apple.menu.about.name", "RipMe");
logger.info("Initialized ripme v" + UpdateUtils.getThisJarVersion());
if (Utils.getConfigString("proxy.http", null) != null) {
Proxy.setHTTPProxy(Utils.getConfigString("proxy.http", null));
} else if (Utils.getConfigString("proxy.socks", null) != null) {
Proxy.setSocks(Utils.getConfigString("proxy.socks", null));
}

if (args.length > 0) {
// CLI Mode
if (GraphicsEnvironment.isHeadless() || args.length > 0) {
handleArguments(args);
} else {
// GUI Mode
if (SystemUtils.IS_OS_MAC_OSX) {
System.setProperty("apple.laf.useScreenMenuBar", "true");
System.setProperty("com.apple.mrj.application.apple.menu.about.name", "RipMe");
}

Utils.configureLogger();

logger.info("Initialized ripme v" + UpdateUtils.getThisJarVersion());

MainWindow mw = new MainWindow();
SwingUtilities.invokeLater(mw);
}
}
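
As a rough illustration (not part of this commit), the proxy.http / proxy.socks lookups in main() above read from ripme's key-value configuration; assuming the usual Java properties syntax and the [user:password]@host[:port] format described by the new CLI options further down, an entry enabling a SOCKS proxy might look like:

    # hypothetical config entry; host, port, and credentials are placeholders
    proxy.socks = user:password@127.0.0.1:1080

With that set, main() calls Proxy.setSocks before any ripping starts; proxy.http works the same way via Proxy.setHTTPProxy.
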

/**
* Creates an abstract ripper and instructs it to rip.
* @param url URL to be ripped
* @throws Exception
* @throws Exception Nothing too specific here, just a catch-all.
*
*/
private static void rip(URL url) throws Exception {
AbstractRipper ripper = AbstractRipper.getRipper(url);
@@ -80,20 +98,45 @@ private static void rip(URL url) throws Exception {
*/
private static void handleArguments(String[] args) {
CommandLine cl = getArgs(args);
if (cl.hasOption('h')) {

//Help (list commands)
if (cl.hasOption('h') || args.length == 0) {
HelpFormatter hf = new HelpFormatter();
hf.printHelp("java -jar ripme.jar [OPTIONS]", getOptions());
System.exit(0);
}

Utils.configureLogger();
logger.info("Initialized ripme v" + UpdateUtils.getThisJarVersion());

//Allow file overwriting
if (cl.hasOption('w')) {
Utils.setConfigBoolean("file.overwrite", true);
}

//SOCKS proxy server
if (cl.hasOption('s')) {
String sservfull = cl.getOptionValue('s').trim();
Proxy.setSocks(sservfull);
}

//HTTP proxy server
if (cl.hasOption('p')) {
String proxyserverfull = cl.getOptionValue('p').trim();
Proxy.setHTTPProxy(proxyserverfull);
}

//Number of threads
if (cl.hasOption('t')) {
Utils.setConfigInteger("threads.size", Integer.parseInt(cl.getOptionValue('t')));
}

//Ignore 404
if (cl.hasOption('4')) {
Utils.setConfigBoolean("errors.skip404", true);
}

//Re-rip <i>all</i> previous albums
if (cl.hasOption('r')) {
// Re-rip all via command-line
List<String> history = Utils.getConfigList("download.history");
@@ -115,6 +158,8 @@ private static void handleArguments(String[] args) {
// Exit
System.exit(0);
}

//Re-rip all <i>selected</i> albums
if (cl.hasOption('R')) {
loadHistory();
if (HISTORY.toList().isEmpty()) {
@@ -146,20 +191,30 @@ private static void handleArguments(String[] args) {
System.exit(-1);
}
}

//Save the order of images in album
if (cl.hasOption('d')) {
Utils.setConfigBoolean("download.save_order", true);
}

//Don't save the order of images in album
if (cl.hasOption('D')) {
Utils.setConfigBoolean("download.save_order", false);
}

//If both are specified, exit since the two options conflict.
if ((cl.hasOption('d'))&&(cl.hasOption('D'))) {
logger.error("\nCannot specify '-d' and '-D' simultaneously");
System.exit(-1);
}

//Destination directory
if (cl.hasOption('l')) {
// change the default rips directory
Utils.setConfigString("rips.directory", cl.getOptionValue('l'));
}

//Read URLs from File
if (cl.hasOption('f')) {
String filename = cl.getOptionValue('f');
try {
Expand All @@ -175,10 +230,13 @@ private static void handleArguments(String[] args) {
logger.error("[!] Failed reading file containing list of URLs. Cannot continue.");
}
}

//The URL to rip.
if (cl.hasOption('u')) {
String url = cl.getOptionValue('u').trim();
ripURL(url, cl.hasOption("n"));
}

}

/**
@@ -226,6 +284,8 @@ private static Options getOptions() {
opts.addOption("n", "no-prop-file", false, "Do not create properties file.");
opts.addOption("f", "urls-file", true, "Rip URLs from a file.");
opts.addOption("v", "version", false, "Show current version");
opts.addOption("s", "socks-server", true, "Use socks server ([user:password]@host[:port])");
opts.addOption("p", "proxy-server", true, "Use HTTP Proxy server ([user:password]@host[:port])");
return opts;
}
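
For illustration (not from the diff itself), the two new proxy flags registered above could be combined with a rip URL roughly like this; the addresses and credentials are placeholders:

    java -jar ripme.jar -s user:password@127.0.0.1:1080 -u https://example.com/album
    java -jar ripme.jar -p proxyuser:secret@proxy.example.com:8080 -u https://example.com/album

Both values use the [user:password]@host[:port] format given in the option descriptions, with the bracketed segments marked optional in the help text.
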

@@ -244,7 +304,7 @@ private static CommandLine getArgs(String[] args) {
return null;
}
}

/**
* Loads history from history file into memory.
*/
28 changes: 28 additions & 0 deletions src/main/java/com/rarchives/ripme/ripper/AbstractHTMLRipper.java
@@ -11,6 +11,7 @@

import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Utils;
import com.rarchives.ripme.ui.MainWindow;

/**
* Simplified ripper, designed for ripping from sites by parsing HTML.
@@ -53,12 +54,29 @@ public URL sanitizeURL(URL url) throws MalformedURLException {
protected boolean hasDescriptionSupport() {
return false;
}

protected String[] getDescription(String url, Document page) throws IOException {
throw new IOException("getDescription not implemented"); // Do I do this or make an abstract function?
}
protected int descSleepTime() {
return 100;
}

protected List<String> getAlbumsToQueue(Document doc) {
return null;
}

// If a page has Queue support then it has no images we want to download, just a list of urls we want to add to
// the queue
protected boolean hasQueueSupport() {
return false;
}

// Takes a url and checks if it is for a page of albums
protected boolean pageContainsAlbums(URL url) {
return false;
}

@Override
public void rip() throws IOException {
int index = 0;
@@ -67,6 +85,16 @@ public void rip() throws IOException {
sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
Document doc = getFirstPage();

if (hasQueueSupport() && pageContainsAlbums(this.url)) {
List<String> urls = getAlbumsToQueue(doc);
for (String url : urls) {
MainWindow.addUrlToQueue(url);
}

// We set doc to null here so the while loop below this doesn't fire
doc = null;
}

while (doc != null) {
if (alreadyDownloadedUrls >= Utils.getConfigInteger("history.end_rip_after_already_seen", 1000000000) && !isThisATest()) {
sendUpdate(STATUS.DOWNLOAD_COMPLETE, "Already seen the last " + alreadyDownloadedUrls + " images ending rip");
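
To make the new queue hooks concrete, here is a hedged sketch (not part of this commit) of a ripper subclass that opts in; the class name, URL pattern, and CSS selector are hypothetical, jsoup is assumed as the HTML library behind Document/Element, and the other abstract methods a real ripper must implement are omitted:

import java.net.URL;
import java.util.ArrayList;
import java.util.List;

import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;

// Hypothetical ripper showing how the new queue hooks fit together; the
// remaining AbstractHTMLRipper methods (getHost, getGID, getFirstPage, ...)
// are omitted for brevity, so this sketch is not compilable as-is.
public class ExampleAlbumListRipper extends AbstractHTMLRipper {

    @Override
    protected boolean hasQueueSupport() {
        // Signal that this ripper can turn an "album index" page into queue entries.
        return true;
    }

    @Override
    protected boolean pageContainsAlbums(URL url) {
        // Hypothetical check: treat listing pages under /albums/ as album indexes.
        return url.toExternalForm().contains("/albums/");
    }

    @Override
    protected List<String> getAlbumsToQueue(Document doc) {
        // Collect every album link on the index page; rip() hands each one to
        // MainWindow.addUrlToQueue instead of downloading images directly.
        List<String> urls = new ArrayList<>();
        for (Element link : doc.select("a.album-link")) { // hypothetical selector
            urls.add(link.attr("abs:href"));
        }
        return urls;
    }
}

With overrides like these, the modified rip() above detects an index page, queues each album URL through MainWindow.addUrlToQueue, and then nulls out doc so the normal per-image download loop is skipped.
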

0 comments on commit 4f9b912
