
Merge pull request #1047 from cyian-1756/reddit-remember-url-fixes

Reddit ripper no longer redownloads reddit videos
Authored by cyian-1756 on 2018-12-22 14:34:06 -05:00; committed by GitHub.
3 changed files with 22 additions and 8 deletions
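The whole feature is gated on the remember.url_history setting, which the hunks below read through Utils.getConfigBoolean with a default of true. Assuming the rip.properties file that ripme reads its configuration from, the behavior should be switchable off with a single line:

    remember.url_history = false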

AbstractRipper.java

@@ -67,7 +67,7 @@ public abstract class AbstractRipper
      * Adds a URL to the url history file
      * @param downloadedURL URL to check if downloaded
      */
-    private void writeDownloadedURL(String downloadedURL) throws IOException {
+    protected void writeDownloadedURL(String downloadedURL) throws IOException {
         // If "save urls only" is checked don't write to the url history file
         if (Utils.getConfigBoolean("urls_only.save", false)) {
             return;
@@ -131,7 +131,7 @@ public abstract class AbstractRipper
      * Returns true if previously downloaded.
      * Returns false if not yet downloaded.
      */
-    private boolean hasDownloadedURL(String url) {
+    protected boolean hasDownloadedURL(String url) {
         File file = new File(URLHistoryFile);
         url = normalizeUrl(url);
@@ -280,6 +280,7 @@ public abstract class AbstractRipper
             saveFileAs.getParentFile().mkdirs();
         }
         if (Utils.getConfigBoolean("remember.url_history", true) && !isThisATest()) {
+            LOGGER.info("Writing " + url.toExternalForm() + " to file");
             try {
                 writeDownloadedURL(url.toExternalForm() + "\n");
             } catch (IOException e) {
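The diff does not show the body of hasDownloadedURL, only that its visibility widens so subclasses can call it. A minimal sketch of what a line-per-URL scan over the history file could look like, assuming writeDownloadedURL appends one normalized URL per line (containsUrl is a hypothetical helper, not part of this commit):

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;

// Hypothetical helper: scan the history file for an exact line match.
// Assumes one normalized URL per line, as written by writeDownloadedURL.
static boolean containsUrl(File historyFile, String normalizedUrl) {
    if (!historyFile.exists()) {
        return false; // no history yet means nothing was downloaded
    }
    try (BufferedReader reader = new BufferedReader(new FileReader(historyFile))) {
        String line;
        while ((line = reader.readLine()) != null) {
            if (line.trim().equals(normalizedUrl)) {
                return true;
            }
        }
    } catch (IOException e) {
        return false; // an unreadable history is treated as "not downloaded"
    }
    return false;
}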

AlbumRipper.java

@@ -51,7 +51,16 @@ public abstract class AlbumRipper extends AbstractRipper {
      * Queues multiple URLs of single images to download from a single Album URL
      */
     public boolean addURLToDownload(URL url, File saveAs, String referrer, Map<String,String> cookies, Boolean getFileExtFromMIME) {
-        // Only download one file if this is a test.
+        // Don't re-add the url if it was downloaded in a previous rip
+        if (Utils.getConfigBoolean("remember.url_history", true) && !isThisATest()) {
+            if (hasDownloadedURL(url.toExternalForm())) {
+                sendUpdate(STATUS.DOWNLOAD_WARN, "Already downloaded " + url.toExternalForm());
+                alreadyDownloadedUrls += 1;
+                return false;
+            }
+        }
+        // Only download one file if this is a test.
         if (super.isThisATest() &&
                 (itemsPending.size() > 0 || itemsCompleted.size() > 0 || itemsErrored.size() > 0)) {
             stop();
@@ -87,6 +96,14 @@ public abstract class AlbumRipper extends AbstractRipper {
             }
             threadPool.addThread(dft);
         }
+        if (Utils.getConfigBoolean("remember.url_history", true) && !isThisATest()) {
+            LOGGER.info("Writing " + url.toExternalForm() + " to file");
+            try {
+                writeDownloadedURL(url.toExternalForm() + "\n");
+            } catch (IOException e) {
+                LOGGER.debug("Unable to write URL history file");
+            }
+        }
         return true;
     }
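Read together, the two AlbumRipper hunks put the history check before anything is queued and the history write after the download thread is queued, so a skipped URL never reaches the thread pool and a URL is recorded only once it is actually scheduled. A condensed sketch of that control flow, written as if inside AlbumRipper (queueDownloadThread stands in for the dft/threadPool plumbing and is hypothetical):

// Condensed sketch of the new addURLToDownload flow, not the verbatim method.
public boolean addURLToDownloadSketch(URL url, File saveAs) {
    boolean useHistory = Utils.getConfigBoolean("remember.url_history", true) && !isThisATest();
    // 1. Skip URLs recorded by a previous rip.
    if (useHistory && hasDownloadedURL(url.toExternalForm())) {
        sendUpdate(STATUS.DOWNLOAD_WARN, "Already downloaded " + url.toExternalForm());
        alreadyDownloadedUrls += 1;
        return false;
    }
    // 2. Queue the download (hypothetical stand-in for the threadPool.addThread(dft) path).
    queueDownloadThread(url, saveAs);
    // 3. Record the URL only after it was actually queued.
    if (useHistory) {
        try {
            writeDownloadedURL(url.toExternalForm() + "\n");
        } catch (IOException e) {
            LOGGER.debug("Unable to write URL history file");
        }
    }
    return true;
}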

RedditRipper.java

@@ -18,10 +18,6 @@ import com.rarchives.ripme.ui.UpdateUtils;
 import com.rarchives.ripme.utils.Http;
 import com.rarchives.ripme.utils.RipUtils;
 import com.rarchives.ripme.utils.Utils;
-import org.jsoup.Jsoup;
-import javax.swing.text.Document;
-import javax.swing.text.Element;

 public class RedditRipper extends AlbumRipper {
@@ -208,7 +204,6 @@ public class RedditRipper extends AlbumRipper {
                     largestHeight = Integer.parseInt(height);
                     baseURL = doc.select("MPD > Period > AdaptationSet > Representation[height=" + height + "]").select("BaseURL").text();
                 }
-                LOGGER.info("H " + e.attr("height") + " V " + e.attr("width"));
             }
             return new URL(vidURL + "/" + baseURL);
         } catch (IOException e) {
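The imports dropped in the first RedditRipper hunk appear to have been unused: the javax.swing.text Document/Element are not the jsoup types the selectors here operate on. For context, the selection logic visible in this hunk keeps the BaseURL of the Representation with the greatest height in the DASH manifest that v.redd.it serves for each video. A minimal, self-contained sketch of that step, assuming the manifest XML has already been fetched into a string (largestBaseUrl is illustrative, not the commit's code):

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.parser.Parser;

// Illustrative sketch: pick the BaseURL of the highest-resolution
// Representation in a DASH MPD manifest, mirroring the selectors above.
static String largestBaseUrl(String mpdXml) {
    Document doc = Jsoup.parse(mpdXml, "", Parser.xmlParser());
    int largestHeight = 0;
    String baseURL = "";
    for (Element rep : doc.select("MPD > Period > AdaptationSet > Representation")) {
        String height = rep.attr("height");
        if (!height.isEmpty() && Integer.parseInt(height) > largestHeight) {
            largestHeight = Integer.parseInt(height);
            baseURL = rep.select("BaseURL").text();
        }
    }
    return baseURL; // relative segment name; the caller prepends the video URL
}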
@@ -251,6 +246,7 @@ public class RedditRipper extends AlbumRipper {
         savePath += id + "-" + url.split("/")[3] + title + ".mp4";
         URL urlToDownload = parseRedditVideoMPD(urls.get(0).toExternalForm());
         if (urlToDownload != null) {
+            LOGGER.info("url: " + urlToDownload + " file: " + savePath);
             addURLToDownload(urlToDownload, new File(savePath));
         }
     }