diff --git a/src/main/java/com/rarchives/ripme/ripper/AbstractRipper.java b/src/main/java/com/rarchives/ripme/ripper/AbstractRipper.java
index 95643b4c..e708ef68 100644
--- a/src/main/java/com/rarchives/ripme/ripper/AbstractRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/AbstractRipper.java
@@ -67,7 +67,7 @@ public abstract class AbstractRipper
      * Adds a URL to the url history file
      * @param downloadedURL URL to check if downloaded
      */
-    private void writeDownloadedURL(String downloadedURL) throws IOException {
+    protected void writeDownloadedURL(String downloadedURL) throws IOException {
         // If "save urls only" is checked don't write to the url history file
         if (Utils.getConfigBoolean("urls_only.save", false)) {
             return;
         }
@@ -131,7 +131,7 @@ public abstract class AbstractRipper
      * Returns true if previously downloaded.
      * Returns false if not yet downloaded.
      */
-    private boolean hasDownloadedURL(String url) {
+    protected boolean hasDownloadedURL(String url) {
         File file = new File(URLHistoryFile);
         url = normalizeUrl(url);
 
@@ -280,6 +280,7 @@ public abstract class AbstractRipper
             saveFileAs.getParentFile().mkdirs();
         }
         if (Utils.getConfigBoolean("remember.url_history", true) && !isThisATest()) {
+            LOGGER.info("Writing " + url.toExternalForm() + " to file");
             try {
                 writeDownloadedURL(url.toExternalForm() + "\n");
             } catch (IOException e) {
diff --git a/src/main/java/com/rarchives/ripme/ripper/AlbumRipper.java b/src/main/java/com/rarchives/ripme/ripper/AlbumRipper.java
index 97943b33..ddd8b14a 100644
--- a/src/main/java/com/rarchives/ripme/ripper/AlbumRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/AlbumRipper.java
@@ -51,7 +51,16 @@ public abstract class AlbumRipper extends AbstractRipper {
      * Queues multiple URLs of single images to download from a single Album URL
      */
     public boolean addURLToDownload(URL url, File saveAs, String referrer, Map<String,String> cookies, Boolean getFileExtFromMIME) {
-        // Only download one file if this is a test.
+        // Don't re-add the url if it was downloaded in a previous rip
+        if (Utils.getConfigBoolean("remember.url_history", true) && !isThisATest()) {
+            if (hasDownloadedURL(url.toExternalForm())) {
+                sendUpdate(STATUS.DOWNLOAD_WARN, "Already downloaded " + url.toExternalForm());
+                alreadyDownloadedUrls += 1;
+                return false;
+            }
+        }
+
+        // Only download one file if this is a test.
         if (super.isThisATest() &&
                 (itemsPending.size() > 0 || itemsCompleted.size() > 0 || itemsErrored.size() > 0)) {
             stop();
@@ -87,6 +96,14 @@ public abstract class AlbumRipper extends AbstractRipper {
             }
             threadPool.addThread(dft);
         }
+        if (Utils.getConfigBoolean("remember.url_history", true) && !isThisATest()) {
+            LOGGER.info("Writing " + url.toExternalForm() + " to file");
+            try {
+                writeDownloadedURL(url.toExternalForm() + "\n");
+            } catch (IOException e) {
+                LOGGER.debug("Unable to write URL history file");
+            }
+        }
         return true;
     }
 
diff --git a/src/main/java/com/rarchives/ripme/ripper/rippers/RedditRipper.java b/src/main/java/com/rarchives/ripme/ripper/rippers/RedditRipper.java
index 172250e3..f0984d7d 100644
--- a/src/main/java/com/rarchives/ripme/ripper/rippers/RedditRipper.java
+++ b/src/main/java/com/rarchives/ripme/ripper/rippers/RedditRipper.java
@@ -18,10 +18,6 @@ import com.rarchives.ripme.ui.UpdateUtils;
 import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.RipUtils;
 import com.rarchives.ripme.utils.Utils;
-import org.jsoup.Jsoup;
-
-import javax.swing.text.Document;
-import javax.swing.text.Element;
 
 public class RedditRipper extends AlbumRipper {
 
@@ -208,7 +204,6 @@ public class RedditRipper extends AlbumRipper {
                     largestHeight = Integer.parseInt(height);
                     baseURL = doc.select("MPD > Period > AdaptationSet > Representation[height=" + height + "]").select("BaseURL").text();
                 }
-                LOGGER.info("H " + e.attr("height") + " V " + e.attr("width"));
             }
             return new URL(vidURL + "/" + baseURL);
         } catch (IOException e) {
@@ -251,6 +246,7 @@ public class RedditRipper extends AlbumRipper {
             savePath += id + "-" + url.split("/")[3] + title + ".mp4";
             URL urlToDownload = parseRedditVideoMPD(urls.get(0).toExternalForm());
             if (urlToDownload != null) {
+                LOGGER.info("url: " + urlToDownload + " file: " + savePath);
                 addURLToDownload(urlToDownload, new File(savePath));
             }
         }
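
For reference, the change above wires RipMe's URL-history deduplication into AlbumRipper: before queueing, hasDownloadedURL() is consulted and already-seen URLs are skipped with a DOWNLOAD_WARN status update; after the download thread is queued, writeDownloadedURL() appends the URL (one per line) whenever the remember.url_history config option is enabled. The standalone class below is only a minimal sketch of that check-then-record flow under those assumptions; the UrlHistorySketch name and the url_history.txt path are illustrative placeholders, not RipMe's actual classes or file layout.

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

// Hypothetical, self-contained sketch of the check-then-record flow added to
// AlbumRipper.addURLToDownload(); class name and file path are illustrative only.
public class UrlHistorySketch {

    private final File historyFile;

    public UrlHistorySketch(File historyFile) {
        this.historyFile = historyFile;
    }

    // True if the URL already appears in the history file (one URL per line).
    public boolean hasDownloadedURL(String url) {
        if (!historyFile.exists()) {
            return false;
        }
        try {
            return Files.readAllLines(historyFile.toPath(), StandardCharsets.UTF_8).contains(url);
        } catch (IOException e) {
            return false; // On a read failure, fall back to downloading.
        }
    }

    // Appends the URL to the history file so later rips can skip it.
    public void writeDownloadedURL(String url) throws IOException {
        try (FileWriter fw = new FileWriter(historyFile, true)) {
            fw.write(url + "\n");
        }
    }

    // Queue a URL only if it was not recorded by a previous rip.
    public boolean addURLToDownload(String url) throws IOException {
        if (hasDownloadedURL(url)) {
            System.out.println("Already downloaded " + url);
            return false;
        }
        // ... hand the URL to the real download queue here ...
        writeDownloadedURL(url);
        return true;
    }

    public static void main(String[] args) throws IOException {
        UrlHistorySketch history = new UrlHistorySketch(new File("url_history.txt"));
        System.out.println(history.addURLToDownload("https://example.com/a.jpg")); // true on first run
        System.out.println(history.addURLToDownload("https://example.com/a.jpg")); // false thereafter
    }
}

One design note visible in the diff itself: AlbumRipper records the URL right after the download thread is queued rather than on completion, so a URL can end up in the history file even if its download later fails.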