mirror of https://github.com/RipMeApp/ripme.git synced 2025-08-22 21:43:06 +02:00

Merge pull request #1293 from Tush-r/album

Removal of AlbumRipper.
cyian-1756 authored on 2019-08-30 12:12:33 -05:00, committed by GitHub
3 changed files with 440 additions and 13 deletions

src/main/java/com/rarchives/ripme/ripper/AbstractHTMLRipper.java

@@ -2,21 +2,29 @@ package com.rarchives.ripme.ripper;
 import java.io.File;
 import java.io.FileOutputStream;
+import java.io.FileWriter;
 import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.URL;
+import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import org.jsoup.nodes.Document;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
 import com.rarchives.ripme.utils.Utils;
 import com.rarchives.ripme.ui.MainWindow;
+import com.rarchives.ripme.ui.RipStatusMessage;
 /**
  * Simplified ripper, designed for ripping from sites by parsing HTML.
  */
-public abstract class AbstractHTMLRipper extends AlbumRipper {
+public abstract class AbstractHTMLRipper extends AbstractRipper {
+    private Map<URL, File> itemsPending = Collections.synchronizedMap(new HashMap<URL, File>());
+    private Map<URL, File> itemsCompleted = Collections.synchronizedMap(new HashMap<URL, File>());
+    private Map<URL, String> itemsErrored = Collections.synchronizedMap(new HashMap<URL, String>());
 
     protected AbstractHTMLRipper(URL url) throws IOException {
         super(url);
@@ -262,4 +270,210 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
        }
        return prefix;
    }
/*
 * ------ Methods copied from AlbumRipper. ------
 * This removes AlbumRipper's usage from this class.
 */
protected boolean allowDuplicates() {
return false;
}
@Override
/**
* Returns total amount of files attempted.
*/
public int getCount() {
return itemsCompleted.size() + itemsErrored.size();
}
@Override
/**
* Queues multiple URLs of single images to download from a single Album URL
*/
public boolean addURLToDownload(URL url, File saveAs, String referrer, Map<String,String> cookies, Boolean getFileExtFromMIME) {
// Only download one file if this is a test.
if (super.isThisATest() &&
(itemsPending.size() > 0 || itemsCompleted.size() > 0 || itemsErrored.size() > 0)) {
stop();
return false;
}
if (!allowDuplicates()
&& ( itemsPending.containsKey(url)
|| itemsCompleted.containsKey(url)
|| itemsErrored.containsKey(url) )) {
// Item is already downloaded/downloading, skip it.
LOGGER.info("[!] Skipping " + url + " -- already attempted: " + Utils.removeCWD(saveAs));
return false;
}
if (Utils.getConfigBoolean("urls_only.save", false)) {
// Output URL to file
String urlFile = this.workingDir + File.separator + "urls.txt";
try (FileWriter fw = new FileWriter(urlFile, true)) {
fw.write(url.toExternalForm());
fw.write(System.lineSeparator());
itemsCompleted.put(url, new File(urlFile));
} catch (IOException e) {
LOGGER.error("Error while writing to " + urlFile, e);
}
}
else {
itemsPending.put(url, saveAs);
DownloadFileThread dft = new DownloadFileThread(url, saveAs, this, getFileExtFromMIME);
if (referrer != null) {
dft.setReferrer(referrer);
}
if (cookies != null) {
dft.setCookies(cookies);
}
threadPool.addThread(dft);
}
return true;
}
@Override
public boolean addURLToDownload(URL url, File saveAs) {
return addURLToDownload(url, saveAs, null, null, false);
}
/**
* Queues image to be downloaded and saved.
* Uses filename from URL to decide filename.
* @param url
* URL to download
* @return
* True on success
*/
protected boolean addURLToDownload(URL url) {
// Use empty prefix and empty subdirectory
return addURLToDownload(url, "", "");
}
@Override
/**
* Cleans up & tells user about successful download
*/
public void downloadCompleted(URL url, File saveAs) {
if (observer == null) {
return;
}
try {
String path = Utils.removeCWD(saveAs);
RipStatusMessage msg = new RipStatusMessage(STATUS.DOWNLOAD_COMPLETE, path);
itemsPending.remove(url);
itemsCompleted.put(url, saveAs);
observer.update(this, msg);
checkIfComplete();
} catch (Exception e) {
LOGGER.error("Exception while updating observer: ", e);
}
}
@Override
/**
* Cleans up & tells user about failed download.
*/
public void downloadErrored(URL url, String reason) {
if (observer == null) {
return;
}
itemsPending.remove(url);
itemsErrored.put(url, reason);
observer.update(this, new RipStatusMessage(STATUS.DOWNLOAD_ERRORED, url + " : " + reason));
checkIfComplete();
}
@Override
/**
* Tells user that a single file in the album they wish to download has
* already been downloaded in the past.
*/
public void downloadExists(URL url, File file) {
if (observer == null) {
return;
}
itemsPending.remove(url);
itemsCompleted.put(url, file);
observer.update(this, new RipStatusMessage(STATUS.DOWNLOAD_WARN, url + " already saved as " + file.getAbsolutePath()));
checkIfComplete();
}
/**
* Notifies observers and updates state if all files have been ripped.
*/
@Override
protected void checkIfComplete() {
if (observer == null) {
return;
}
if (itemsPending.isEmpty()) {
super.checkIfComplete();
}
}
/**
* Sets directory to save all ripped files to.
* @param url
* URL to define how the working directory should be saved.
* @throws
* IOException
*/
@Override
public void setWorkingDir(URL url) throws IOException {
String path = Utils.getWorkingDirectory().getCanonicalPath();
if (!path.endsWith(File.separator)) {
path += File.separator;
}
String title;
if (Utils.getConfigBoolean("album_titles.save", true)) {
title = getAlbumTitle(this.url);
} else {
title = super.getAlbumTitle(this.url);
}
LOGGER.debug("Using album title '" + title + "'");
title = Utils.filesystemSafe(title);
path += title;
path = Utils.getOriginalDirectory(path) + File.separator; // check for case sensitive (unix only)
this.workingDir = new File(path);
if (!this.workingDir.exists()) {
LOGGER.info("[+] Creating directory: " + Utils.removeCWD(this.workingDir));
this.workingDir.mkdirs();
}
LOGGER.debug("Set working directory to: " + this.workingDir);
}
/**
* @return
* Integer between 0 and 100 defining the progress of the album rip.
*/
@Override
public int getCompletionPercentage() {
double total = itemsPending.size() + itemsErrored.size() + itemsCompleted.size();
return (int) (100 * ( (total - itemsPending.size()) / total));
}
/**
* @return
* Human-readable information on the status of the current rip.
*/
@Override
public String getStatusText() {
StringBuilder sb = new StringBuilder();
sb.append(getCompletionPercentage())
.append("% ")
.append("- Pending: " ).append(itemsPending.size())
.append(", Completed: ").append(itemsCompleted.size())
.append(", Errored: " ).append(itemsErrored.size());
return sb.toString();
}
}
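The block above is the substance of the change: AbstractHTMLRipper now keeps its own pending/completed/errored bookkeeping instead of inheriting it from AlbumRipper. The sketch below is a self-contained model of that bookkeeping (the three synchronized maps, the completion percentage, and the status line). The class and method names are illustrative stand-ins, not the real ripme API.

import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/** Stand-in model of the bookkeeping copied into AbstractHTMLRipper/AbstractJSONRipper. */
public class RipBookkeepingSketch {
    // Same three maps as in the patched rippers: URL -> target file / error reason.
    private final Map<URL, File> itemsPending = Collections.synchronizedMap(new HashMap<>());
    private final Map<URL, File> itemsCompleted = Collections.synchronizedMap(new HashMap<>());
    private final Map<URL, String> itemsErrored = Collections.synchronizedMap(new HashMap<>());

    /** Mirrors addURLToDownload's duplicate check: skip URLs already seen in any map. */
    boolean queue(URL url, File saveAs) {
        if (itemsPending.containsKey(url) || itemsCompleted.containsKey(url) || itemsErrored.containsKey(url)) {
            return false; // already attempted
        }
        itemsPending.put(url, saveAs);
        return true;
    }

    /** Mirrors downloadCompleted: move the entry from pending to completed. */
    void completed(URL url, File saveAs) {
        itemsPending.remove(url);
        itemsCompleted.put(url, saveAs);
    }

    /** Mirrors downloadErrored: move the entry from pending to errored. */
    void errored(URL url, String reason) {
        itemsPending.remove(url);
        itemsErrored.put(url, reason);
    }

    /** Same formula as getCompletionPercentage: 100 * (finished / total). */
    int completionPercentage() {
        double total = itemsPending.size() + itemsErrored.size() + itemsCompleted.size();
        return (int) (100 * ((total - itemsPending.size()) / total));
    }

    /** Same shape as getStatusText. */
    String statusText() {
        return completionPercentage() + "% - Pending: " + itemsPending.size()
                + ", Completed: " + itemsCompleted.size()
                + ", Errored: " + itemsErrored.size();
    }

    public static void main(String[] args) throws MalformedURLException {
        RipBookkeepingSketch rip = new RipBookkeepingSketch();
        URL a = new URL("https://example.com/a.jpg");
        URL b = new URL("https://example.com/b.jpg");
        rip.queue(a, new File("a.jpg"));
        rip.queue(b, new File("b.jpg"));
        rip.completed(a, new File("a.jpg"));
        rip.errored(b, "HTTP 404");
        System.out.println(rip.statusText()); // prints "100% - Pending: 0, Completed: 1, Errored: 1"
    }
}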

src/main/java/com/rarchives/ripme/ripper/AbstractJSONRipper.java

@@ -1,19 +1,27 @@
 package com.rarchives.ripme.ripper;
+import java.io.File;
+import java.io.FileWriter;
 import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.URL;
+import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import org.json.JSONObject;
+import com.rarchives.ripme.ui.RipStatusMessage;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
 import com.rarchives.ripme.utils.Utils;
 /**
  * Simplified ripper, designed for ripping from sites by parsing JSON.
  */
-public abstract class AbstractJSONRipper extends AlbumRipper {
+public abstract class AbstractJSONRipper extends AbstractRipper {
+    private Map<URL, File> itemsPending = Collections.synchronizedMap(new HashMap<URL, File>());
+    private Map<URL, File> itemsCompleted = Collections.synchronizedMap(new HashMap<URL, File>());
+    private Map<URL, String> itemsErrored = Collections.synchronizedMap(new HashMap<URL, String>());
 
     protected AbstractJSONRipper(URL url) throws IOException {
         super(url);
@@ -111,4 +119,209 @@ public abstract class AbstractJSONRipper extends AlbumRipper {
        }
        return prefix;
    }
/*
* ------ Methods copied from AlbumRipper ------
*/
protected boolean allowDuplicates() {
return false;
}
@Override
/**
* Returns total amount of files attempted.
*/
public int getCount() {
return itemsCompleted.size() + itemsErrored.size();
}
@Override
/**
* Queues multiple URLs of single images to download from a single Album URL
*/
public boolean addURLToDownload(URL url, File saveAs, String referrer, Map<String,String> cookies, Boolean getFileExtFromMIME) {
// Only download one file if this is a test.
if (super.isThisATest() &&
(itemsPending.size() > 0 || itemsCompleted.size() > 0 || itemsErrored.size() > 0)) {
stop();
return false;
}
if (!allowDuplicates()
&& ( itemsPending.containsKey(url)
|| itemsCompleted.containsKey(url)
|| itemsErrored.containsKey(url) )) {
// Item is already downloaded/downloading, skip it.
LOGGER.info("[!] Skipping " + url + " -- already attempted: " + Utils.removeCWD(saveAs));
return false;
}
if (Utils.getConfigBoolean("urls_only.save", false)) {
// Output URL to file
String urlFile = this.workingDir + File.separator + "urls.txt";
try (FileWriter fw = new FileWriter(urlFile, true)) {
fw.write(url.toExternalForm());
fw.write(System.lineSeparator());
itemsCompleted.put(url, new File(urlFile));
} catch (IOException e) {
LOGGER.error("Error while writing to " + urlFile, e);
}
}
else {
itemsPending.put(url, saveAs);
DownloadFileThread dft = new DownloadFileThread(url, saveAs, this, getFileExtFromMIME);
if (referrer != null) {
dft.setReferrer(referrer);
}
if (cookies != null) {
dft.setCookies(cookies);
}
threadPool.addThread(dft);
}
return true;
}
@Override
public boolean addURLToDownload(URL url, File saveAs) {
return addURLToDownload(url, saveAs, null, null, false);
}
/**
* Queues image to be downloaded and saved.
* Uses filename from URL to decide filename.
* @param url
* URL to download
* @return
* True on success
*/
protected boolean addURLToDownload(URL url) {
// Use empty prefix and empty subdirectory
return addURLToDownload(url, "", "");
}
@Override
/**
* Cleans up & tells user about successful download
*/
public void downloadCompleted(URL url, File saveAs) {
if (observer == null) {
return;
}
try {
String path = Utils.removeCWD(saveAs);
RipStatusMessage msg = new RipStatusMessage(STATUS.DOWNLOAD_COMPLETE, path);
itemsPending.remove(url);
itemsCompleted.put(url, saveAs);
observer.update(this, msg);
checkIfComplete();
} catch (Exception e) {
LOGGER.error("Exception while updating observer: ", e);
}
}
@Override
/**
* Cleans up & tells user about failed download.
*/
public void downloadErrored(URL url, String reason) {
if (observer == null) {
return;
}
itemsPending.remove(url);
itemsErrored.put(url, reason);
observer.update(this, new RipStatusMessage(STATUS.DOWNLOAD_ERRORED, url + " : " + reason));
checkIfComplete();
}
@Override
/**
* Tells user that a single file in the album they wish to download has
* already been downloaded in the past.
*/
public void downloadExists(URL url, File file) {
if (observer == null) {
return;
}
itemsPending.remove(url);
itemsCompleted.put(url, file);
observer.update(this, new RipStatusMessage(STATUS.DOWNLOAD_WARN, url + " already saved as " + file.getAbsolutePath()));
checkIfComplete();
}
/**
* Notifies observers and updates state if all files have been ripped.
*/
@Override
protected void checkIfComplete() {
if (observer == null) {
return;
}
if (itemsPending.isEmpty()) {
super.checkIfComplete();
}
}
/**
* Sets directory to save all ripped files to.
* @param url
* URL to define how the working directory should be saved.
* @throws
* IOException
*/
@Override
public void setWorkingDir(URL url) throws IOException {
String path = Utils.getWorkingDirectory().getCanonicalPath();
if (!path.endsWith(File.separator)) {
path += File.separator;
}
String title;
if (Utils.getConfigBoolean("album_titles.save", true)) {
title = getAlbumTitle(this.url);
} else {
title = super.getAlbumTitle(this.url);
}
LOGGER.debug("Using album title '" + title + "'");
title = Utils.filesystemSafe(title);
path += title;
path = Utils.getOriginalDirectory(path) + File.separator; // check for case sensitive (unix only)
this.workingDir = new File(path);
if (!this.workingDir.exists()) {
LOGGER.info("[+] Creating directory: " + Utils.removeCWD(this.workingDir));
this.workingDir.mkdirs();
}
LOGGER.debug("Set working directory to: " + this.workingDir);
}
/**
* @return
* Integer between 0 and 100 defining the progress of the album rip.
*/
@Override
public int getCompletionPercentage() {
double total = itemsPending.size() + itemsErrored.size() + itemsCompleted.size();
return (int) (100 * ( (total - itemsPending.size()) / total));
}
/**
* @return
* Human-readable information on the status of the current rip.
*/
@Override
public String getStatusText() {
StringBuilder sb = new StringBuilder();
sb.append(getCompletionPercentage())
.append("% ")
.append("- Pending: " ).append(itemsPending.size())
.append(", Completed: ").append(itemsCompleted.size())
.append(", Errored: " ).append(itemsErrored.size());
return sb.toString();
}
}
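The addURLToDownload logic now duplicated in AbstractJSONRipper also carries the urls_only.save branch: when that config flag is set, nothing is downloaded and each queued URL is appended to a urls.txt file in the working directory. A minimal standalone sketch of that append pattern, with the directory and URL as placeholder values:

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;

public class UrlsOnlySketch {
    public static void main(String[] args) throws MalformedURLException {
        File workingDir = new File("."); // the ripper's album directory in the real code
        URL url = new URL("https://example.com/image.jpg");

        // Same pattern as the copied addURLToDownload: open urls.txt in append mode
        // and write one URL per line instead of downloading the file.
        String urlFile = workingDir + File.separator + "urls.txt";
        try (FileWriter fw = new FileWriter(urlFile, true)) {
            fw.write(url.toExternalForm());
            fw.write(System.lineSeparator());
        } catch (IOException e) {
            System.err.println("Error while writing to " + urlFile + ": " + e);
        }
    }
}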

src/main/java/com/rarchives/ripme/ripper/AbstractRipper.java

@@ -1,7 +1,11 @@
 package com.rarchives.ripme.ripper;
 import java.awt.Desktop;
-import java.io.*;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileWriter;
+import java.io.IOException;
 import java.lang.reflect.Constructor;
 import java.net.MalformedURLException;
 import java.net.URL;
@@ -9,21 +13,17 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.Observable;
+import java.util.Scanner;
+import com.rarchives.ripme.App;
 import org.apache.log4j.FileAppender;
 import org.apache.log4j.Logger;
 import org.jsoup.HttpStatusException;
-import com.rarchives.ripme.App;
 import com.rarchives.ripme.ui.RipStatusComplete;
 import com.rarchives.ripme.ui.RipStatusHandler;
 import com.rarchives.ripme.ui.RipStatusMessage;
 import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
 import com.rarchives.ripme.utils.Utils;
-import java.io.File;
-import java.util.Scanner;
 public abstract class AbstractRipper
                 extends Observable
                 implements RipperInterface, Runnable {
@@ -548,7 +548,7 @@ public abstract class AbstractRipper
     public static AbstractRipper getRipper(URL url) throws Exception {
         for (Constructor<?> constructor : getRipperConstructors("com.rarchives.ripme.ripper.rippers")) {
             try {
-                AlbumRipper ripper = (AlbumRipper) constructor.newInstance(url); // by design: can throw ClassCastException
+                AbstractRipper ripper = (AbstractRipper) constructor.newInstance(url); // by design: can throw ClassCastException
                 LOGGER.debug("Found album ripper: " + ripper.getClass().getName());
                 return ripper;
             } catch (Exception e) {
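The final hunk only swaps the cast: getRipper still probes every ripper constructor via reflection and relies on constructors that throw to filter out classes that do not accept the URL. Below is a compact standalone analogue of that probe-and-fall-through loop; FooRipper and BarRipper are hypothetical stand-ins, not real ripme rippers.

import java.lang.reflect.Constructor;
import java.net.URL;
import java.util.List;

public class RipperLookupSketch {
    // Hypothetical stand-ins for ripper classes; each constructor rejects URLs it cannot handle.
    abstract static class BaseRipper {
        final URL url;
        BaseRipper(URL url) { this.url = url; }
    }
    static class FooRipper extends BaseRipper {
        FooRipper(URL url) {
            super(url);
            if (!url.getHost().contains("foo")) throw new IllegalArgumentException("not a foo URL");
        }
    }
    static class BarRipper extends BaseRipper {
        BarRipper(URL url) {
            super(url);
            if (!url.getHost().contains("bar")) throw new IllegalArgumentException("not a bar URL");
        }
    }

    /** Same shape as AbstractRipper.getRipper: try each constructor, keep the first that succeeds. */
    static BaseRipper getRipper(URL url) throws Exception {
        List<Class<? extends BaseRipper>> candidates = List.of(FooRipper.class, BarRipper.class);
        for (Class<? extends BaseRipper> clazz : candidates) {
            try {
                Constructor<? extends BaseRipper> ctor = clazz.getDeclaredConstructor(URL.class);
                return ctor.newInstance(url); // by design: may throw if the ripper rejects the URL
            } catch (Exception e) {
                // Not a match; fall through to the next candidate.
            }
        }
        throw new Exception("No ripper found for " + url);
    }

    public static void main(String[] args) throws Exception {
        // Prints "BarRipper": FooRipper's constructor throws, so the loop moves on.
        System.out.println(getRipper(new URL("https://bar.example/album/1")).getClass().getSimpleName());
    }
}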