Mirror of https://github.com/RipMeApp/ripme.git — synced 2025-08-26 23:34:53 +02:00

Started on fixing reddit redownloading issue

This commit is contained in:
cyian-1756
2018-11-13 00:06:55 -05:00
parent 4acfb57b9c
commit 6e86e94bd7
3 changed files with 14 additions and 3 deletions

View File

@@ -131,7 +131,7 @@ public abstract class AbstractRipper
      * Returns true if previously downloaded.
      * Returns false if not yet downloaded.
      */
-    private boolean hasDownloadedURL(String url) {
+    protected boolean hasDownloadedURL(String url) {
         File file = new File(URLHistoryFile);
         url = normalizeUrl(url);
@@ -280,6 +280,7 @@ public abstract class AbstractRipper
             saveFileAs.getParentFile().mkdirs();
         }
         if (Utils.getConfigBoolean("remember.url_history", true) && !isThisATest()) {
+            LOGGER.info("Writing " + url.toExternalForm() + " to file");
             try {
                 writeDownloadedURL(url.toExternalForm() + "\n");
             } catch (IOException e) {

View File

@@ -51,7 +51,16 @@ public abstract class AlbumRipper extends AbstractRipper {
      * Queues multiple URLs of single images to download from a single Album URL
      */
     public boolean addURLToDownload(URL url, File saveAs, String referrer, Map<String,String> cookies, Boolean getFileExtFromMIME) {
+        // Don't re-add the url if it was downloaded in a previous rip
+        if (Utils.getConfigBoolean("remember.url_history", true) && !isThisATest()) {
+            if (hasDownloadedURL(url.toExternalForm())) {
+                sendUpdate(STATUS.DOWNLOAD_WARN, "Already downloaded " + url.toExternalForm());
+                alreadyDownloadedUrls += 1;
+                return false;
+            }
+        }
         // Only download one file if this is a test.
         if (super.isThisATest() &&
             (itemsPending.size() > 0 || itemsCompleted.size() > 0 || itemsErrored.size() > 0)) {
             stop();

View File

@@ -8,6 +8,7 @@ import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import com.rarchives.ripme.ripper.AbstractRipper;
 import com.rarchives.ripme.ui.RipStatusMessage;
 import org.json.JSONArray;
 import org.json.JSONObject;
@@ -208,7 +209,6 @@ public class RedditRipper extends AlbumRipper
                 largestHeight = Integer.parseInt(height);
                 baseURL = doc.select("MPD > Period > AdaptationSet > Representation[height=" + height + "]").select("BaseURL").text();
             }
-            LOGGER.info("H " + e.attr("height") + " V " + e.attr("width"));
         }
         return new URL(vidURL + "/" + baseURL);
     } catch (IOException e) {
@@ -251,6 +251,7 @@ public class RedditRipper extends AlbumRipper {
         savePath += id + "-" + url.split("/")[3] + title + ".mp4";
         URL urlToDownload = parseRedditVideoMPD(urls.get(0).toExternalForm());
         if (urlToDownload != null) {
+            LOGGER.info("url: " + urlToDownload + " file: " + savePath);
             addURLToDownload(urlToDownload, new File(savePath));
         }
     }