mirror of https://github.com/RipMeApp/ripme.git synced 2025-01-18 05:08:15 +01:00

Merge branch 'master' into issues/1402_confirm_clear

Isaaku committed 2019-08-30 15:45:57 -05:00
commit 557a8be7a2
34 changed files with 1077 additions and 432 deletions

.github/workflows/maven.yml (new file, 21 lines added)

@ -0,0 +1,21 @@
name: Java CI

on: [push, pull_request]

jobs:
  build:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest, macOS-latest]
        java: [1.8, 1.9]
    steps:
    - uses: actions/checkout@v1
    - name: Set up JDK 1.8
      uses: actions/setup-java@v1
      with:
        java-version: ${{ matrix.java }}
    - name: Build with Maven
      run: mvn package --file pom.xml

src/main/java/com/rarchives/ripme/ripper/AbstractHTMLRipper.java

@ -2,21 +2,29 @@ package com.rarchives.ripme.ripper;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.jsoup.nodes.Document;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Utils;
import com.rarchives.ripme.ui.MainWindow;
import com.rarchives.ripme.ui.RipStatusMessage;
/**
* Simplified ripper, designed for ripping from sites by parsing HTML.
*/
public abstract class AbstractHTMLRipper extends AlbumRipper {
public abstract class AbstractHTMLRipper extends AbstractRipper {
private Map<URL, File> itemsPending = Collections.synchronizedMap(new HashMap<URL, File>());
private Map<URL, File> itemsCompleted = Collections.synchronizedMap(new HashMap<URL, File>());
private Map<URL, String> itemsErrored = Collections.synchronizedMap(new HashMap<URL, String>());
protected AbstractHTMLRipper(URL url) throws IOException {
super(url);
@ -262,4 +270,210 @@ public abstract class AbstractHTMLRipper extends AlbumRipper {
}
return prefix;
}
/*
* ------ Methods copied from AlbumRipper. ------
* This removes AlbumRipper's usage from this class.
*/
protected boolean allowDuplicates() {
return false;
}
@Override
/**
* Returns total amount of files attempted.
*/
public int getCount() {
return itemsCompleted.size() + itemsErrored.size();
}
@Override
/**
* Queues multiple URLs of single images to download from a single Album URL
*/
public boolean addURLToDownload(URL url, File saveAs, String referrer, Map<String,String> cookies, Boolean getFileExtFromMIME) {
// Only download one file if this is a test.
if (super.isThisATest() &&
(itemsPending.size() > 0 || itemsCompleted.size() > 0 || itemsErrored.size() > 0)) {
stop();
return false;
}
if (!allowDuplicates()
&& ( itemsPending.containsKey(url)
|| itemsCompleted.containsKey(url)
|| itemsErrored.containsKey(url) )) {
// Item is already downloaded/downloading, skip it.
LOGGER.info("[!] Skipping " + url + " -- already attempted: " + Utils.removeCWD(saveAs));
return false;
}
if (Utils.getConfigBoolean("urls_only.save", false)) {
// Output URL to file
String urlFile = this.workingDir + File.separator + "urls.txt";
try (FileWriter fw = new FileWriter(urlFile, true)) {
fw.write(url.toExternalForm());
fw.write(System.lineSeparator());
itemsCompleted.put(url, new File(urlFile));
} catch (IOException e) {
LOGGER.error("Error while writing to " + urlFile, e);
}
}
else {
itemsPending.put(url, saveAs);
DownloadFileThread dft = new DownloadFileThread(url, saveAs, this, getFileExtFromMIME);
if (referrer != null) {
dft.setReferrer(referrer);
}
if (cookies != null) {
dft.setCookies(cookies);
}
threadPool.addThread(dft);
}
return true;
}
@Override
public boolean addURLToDownload(URL url, File saveAs) {
return addURLToDownload(url, saveAs, null, null, false);
}
/**
* Queues image to be downloaded and saved.
* Uses filename from URL to decide filename.
* @param url
* URL to download
* @return
* True on success
*/
protected boolean addURLToDownload(URL url) {
// Use empty prefix and empty subdirectory
return addURLToDownload(url, "", "");
}
@Override
/**
* Cleans up & tells user about successful download
*/
public void downloadCompleted(URL url, File saveAs) {
if (observer == null) {
return;
}
try {
String path = Utils.removeCWD(saveAs);
RipStatusMessage msg = new RipStatusMessage(STATUS.DOWNLOAD_COMPLETE, path);
itemsPending.remove(url);
itemsCompleted.put(url, saveAs);
observer.update(this, msg);
checkIfComplete();
} catch (Exception e) {
LOGGER.error("Exception while updating observer: ", e);
}
}
@Override
/**
* Cleans up & tells user about failed download.
*/
public void downloadErrored(URL url, String reason) {
if (observer == null) {
return;
}
itemsPending.remove(url);
itemsErrored.put(url, reason);
observer.update(this, new RipStatusMessage(STATUS.DOWNLOAD_ERRORED, url + " : " + reason));
checkIfComplete();
}
@Override
/**
* Tells user that a single file in the album they wish to download has
* already been downloaded in the past.
*/
public void downloadExists(URL url, File file) {
if (observer == null) {
return;
}
itemsPending.remove(url);
itemsCompleted.put(url, file);
observer.update(this, new RipStatusMessage(STATUS.DOWNLOAD_WARN, url + " already saved as " + file.getAbsolutePath()));
checkIfComplete();
}
/**
* Notifies observers and updates state if all files have been ripped.
*/
@Override
protected void checkIfComplete() {
if (observer == null) {
return;
}
if (itemsPending.isEmpty()) {
super.checkIfComplete();
}
}
/**
* Sets directory to save all ripped files to.
* @param url
* URL to define how the working directory should be saved.
* @throws
* IOException
*/
@Override
public void setWorkingDir(URL url) throws IOException {
String path = Utils.getWorkingDirectory().getCanonicalPath();
if (!path.endsWith(File.separator)) {
path += File.separator;
}
String title;
if (Utils.getConfigBoolean("album_titles.save", true)) {
title = getAlbumTitle(this.url);
} else {
title = super.getAlbumTitle(this.url);
}
LOGGER.debug("Using album title '" + title + "'");
title = Utils.filesystemSafe(title);
path += title;
path = Utils.getOriginalDirectory(path) + File.separator; // check for case sensitive (unix only)
this.workingDir = new File(path);
if (!this.workingDir.exists()) {
LOGGER.info("[+] Creating directory: " + Utils.removeCWD(this.workingDir));
this.workingDir.mkdirs();
}
LOGGER.debug("Set working directory to: " + this.workingDir);
}
/**
* @return
* Integer between 0 and 100 defining the progress of the album rip.
*/
@Override
public int getCompletionPercentage() {
double total = itemsPending.size() + itemsErrored.size() + itemsCompleted.size();
return (int) (100 * ( (total - itemsPending.size()) / total));
}
/**
* @return
* Human-readable information on the status of the current rip.
*/
@Override
public String getStatusText() {
StringBuilder sb = new StringBuilder();
sb.append(getCompletionPercentage())
.append("% ")
.append("- Pending: " ).append(itemsPending.size())
.append(", Completed: ").append(itemsCompleted.size())
.append(", Errored: " ).append(itemsErrored.size());
return sb.toString();
}
}

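For illustration, the refactor above means a concrete HTML ripper now only supplies the parsing hooks, while the queueing, duplicate checks and progress bookkeeping shown in this class come for free. A minimal sketch of such a subclass (ExampleRipper, example.com and the CSS selector are made up; the MyreadingmangaRipper added later in this commit follows the same shape):

package com.rarchives.ripme.ripper.rippers;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;

import com.rarchives.ripme.ripper.AbstractHTMLRipper;
import com.rarchives.ripme.utils.Http;

// Hypothetical ripper, shown only to illustrate the AbstractHTMLRipper contract.
public class ExampleRipper extends AbstractHTMLRipper {

    public ExampleRipper(URL url) throws IOException {
        super(url);
    }

    @Override
    public String getHost() {
        return "example";
    }

    @Override
    public String getDomain() {
        return "example.com";
    }

    @Override
    public String getGID(URL url) throws MalformedURLException {
        Pattern p = Pattern.compile("^https?://example\\.com/album/([a-zA-Z0-9_-]+)/?$");
        Matcher m = p.matcher(url.toExternalForm());
        if (m.matches()) {
            return m.group(1);
        }
        throw new MalformedURLException("Expected example.com/album/id URL, got " + url + " instead");
    }

    @Override
    public Document getFirstPage() throws IOException {
        return Http.url(url).get();
    }

    @Override
    public List<String> getURLsFromPage(Document doc) {
        List<String> result = new ArrayList<>();
        for (Element img : doc.select("img.full-size")) { // selector is illustrative
            result.add(img.attr("src"));
        }
        return result;
    }

    @Override
    public void downloadURL(URL url, int index) {
        // Hands off to the bookkeeping copied into AbstractHTMLRipper above.
        addURLToDownload(url, getPrefix(index));
    }
}

With, say, 2 URLs still pending, 5 completed and 1 errored, the inherited methods report getCount() = 6 attempted files, getCompletionPercentage() = (int) (100 * (8 - 2) / 8.0) = 75, and a status line of roughly "75% - Pending: 2, Completed: 5, Errored: 1".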
src/main/java/com/rarchives/ripme/ripper/AbstractJSONRipper.java

@ -1,19 +1,27 @@
package com.rarchives.ripme.ripper;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.json.JSONObject;
import com.rarchives.ripme.ui.RipStatusMessage;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Utils;
/**
* Simplified ripper, designed for ripping from sites by parsing JSON.
*/
public abstract class AbstractJSONRipper extends AlbumRipper {
public abstract class AbstractJSONRipper extends AbstractRipper {
private Map<URL, File> itemsPending = Collections.synchronizedMap(new HashMap<URL, File>());
private Map<URL, File> itemsCompleted = Collections.synchronizedMap(new HashMap<URL, File>());
private Map<URL, String> itemsErrored = Collections.synchronizedMap(new HashMap<URL, String>());
protected AbstractJSONRipper(URL url) throws IOException {
super(url);
@ -111,4 +119,209 @@ public abstract class AbstractJSONRipper extends AlbumRipper {
}
return prefix;
}
/*
* ------ Methods copied from AlbumRipper ------
*/
protected boolean allowDuplicates() {
return false;
}
@Override
/**
* Returns total amount of files attempted.
*/
public int getCount() {
return itemsCompleted.size() + itemsErrored.size();
}
@Override
/**
* Queues multiple URLs of single images to download from a single Album URL
*/
public boolean addURLToDownload(URL url, File saveAs, String referrer, Map<String,String> cookies, Boolean getFileExtFromMIME) {
// Only download one file if this is a test.
if (super.isThisATest() &&
(itemsPending.size() > 0 || itemsCompleted.size() > 0 || itemsErrored.size() > 0)) {
stop();
return false;
}
if (!allowDuplicates()
&& ( itemsPending.containsKey(url)
|| itemsCompleted.containsKey(url)
|| itemsErrored.containsKey(url) )) {
// Item is already downloaded/downloading, skip it.
LOGGER.info("[!] Skipping " + url + " -- already attempted: " + Utils.removeCWD(saveAs));
return false;
}
if (Utils.getConfigBoolean("urls_only.save", false)) {
// Output URL to file
String urlFile = this.workingDir + File.separator + "urls.txt";
try (FileWriter fw = new FileWriter(urlFile, true)) {
fw.write(url.toExternalForm());
fw.write(System.lineSeparator());
itemsCompleted.put(url, new File(urlFile));
} catch (IOException e) {
LOGGER.error("Error while writing to " + urlFile, e);
}
}
else {
itemsPending.put(url, saveAs);
DownloadFileThread dft = new DownloadFileThread(url, saveAs, this, getFileExtFromMIME);
if (referrer != null) {
dft.setReferrer(referrer);
}
if (cookies != null) {
dft.setCookies(cookies);
}
threadPool.addThread(dft);
}
return true;
}
@Override
public boolean addURLToDownload(URL url, File saveAs) {
return addURLToDownload(url, saveAs, null, null, false);
}
/**
* Queues image to be downloaded and saved.
* Uses filename from URL to decide filename.
* @param url
* URL to download
* @return
* True on success
*/
protected boolean addURLToDownload(URL url) {
// Use empty prefix and empty subdirectory
return addURLToDownload(url, "", "");
}
@Override
/**
* Cleans up & tells user about successful download
*/
public void downloadCompleted(URL url, File saveAs) {
if (observer == null) {
return;
}
try {
String path = Utils.removeCWD(saveAs);
RipStatusMessage msg = new RipStatusMessage(STATUS.DOWNLOAD_COMPLETE, path);
itemsPending.remove(url);
itemsCompleted.put(url, saveAs);
observer.update(this, msg);
checkIfComplete();
} catch (Exception e) {
LOGGER.error("Exception while updating observer: ", e);
}
}
@Override
/**
* Cleans up & tells user about failed download.
*/
public void downloadErrored(URL url, String reason) {
if (observer == null) {
return;
}
itemsPending.remove(url);
itemsErrored.put(url, reason);
observer.update(this, new RipStatusMessage(STATUS.DOWNLOAD_ERRORED, url + " : " + reason));
checkIfComplete();
}
@Override
/**
* Tells user that a single file in the album they wish to download has
* already been downloaded in the past.
*/
public void downloadExists(URL url, File file) {
if (observer == null) {
return;
}
itemsPending.remove(url);
itemsCompleted.put(url, file);
observer.update(this, new RipStatusMessage(STATUS.DOWNLOAD_WARN, url + " already saved as " + file.getAbsolutePath()));
checkIfComplete();
}
/**
* Notifies observers and updates state if all files have been ripped.
*/
@Override
protected void checkIfComplete() {
if (observer == null) {
return;
}
if (itemsPending.isEmpty()) {
super.checkIfComplete();
}
}
/**
* Sets directory to save all ripped files to.
* @param url
* URL to define how the working directory should be saved.
* @throws
* IOException
*/
@Override
public void setWorkingDir(URL url) throws IOException {
String path = Utils.getWorkingDirectory().getCanonicalPath();
if (!path.endsWith(File.separator)) {
path += File.separator;
}
String title;
if (Utils.getConfigBoolean("album_titles.save", true)) {
title = getAlbumTitle(this.url);
} else {
title = super.getAlbumTitle(this.url);
}
LOGGER.debug("Using album title '" + title + "'");
title = Utils.filesystemSafe(title);
path += title;
path = Utils.getOriginalDirectory(path) + File.separator; // check for case sensitive (unix only)
this.workingDir = new File(path);
if (!this.workingDir.exists()) {
LOGGER.info("[+] Creating directory: " + Utils.removeCWD(this.workingDir));
this.workingDir.mkdirs();
}
LOGGER.debug("Set working directory to: " + this.workingDir);
}
/**
* @return
* Integer between 0 and 100 defining the progress of the album rip.
*/
@Override
public int getCompletionPercentage() {
double total = itemsPending.size() + itemsErrored.size() + itemsCompleted.size();
return (int) (100 * ( (total - itemsPending.size()) / total));
}
/**
* @return
* Human-readable information on the status of the current rip.
*/
@Override
public String getStatusText() {
StringBuilder sb = new StringBuilder();
sb.append(getCompletionPercentage())
.append("% ")
.append("- Pending: " ).append(itemsPending.size())
.append(", Completed: ").append(itemsCompleted.size())
.append(", Errored: " ).append(itemsErrored.size());
return sb.toString();
}
}

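The JSON variant mirrors the HTML one: subclasses return a JSONObject for the first page and extract URLs from it. A minimal sketch (ExampleJSONRipper, api.example.com and the "images"/"url" response fields are all assumptions; Http.getJSON() is the existing helper other JSON rippers use):

package com.rarchives.ripme.ripper.rippers;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;

import org.json.JSONArray;
import org.json.JSONObject;

import com.rarchives.ripme.ripper.AbstractJSONRipper;
import com.rarchives.ripme.utils.Http;

// Hypothetical ripper, shown only to illustrate the AbstractJSONRipper contract.
public class ExampleJSONRipper extends AbstractJSONRipper {

    public ExampleJSONRipper(URL url) throws IOException {
        super(url);
    }

    @Override
    public String getHost() {
        return "examplejson";
    }

    @Override
    protected String getDomain() {
        return "api.example.com";
    }

    @Override
    public String getGID(URL url) throws MalformedURLException {
        // Assumed URL shape: https://api.example.com/gallery/<id>
        String[] parts = url.getPath().split("/");
        return parts[parts.length - 1];
    }

    @Override
    protected JSONObject getFirstPage() throws IOException {
        return Http.url(url).getJSON();
    }

    @Override
    protected List<String> getURLsFromJSON(JSONObject json) {
        List<String> urls = new ArrayList<>();
        JSONArray images = json.getJSONArray("images"); // assumed response shape
        for (int i = 0; i < images.length(); i++) {
            urls.add(images.getJSONObject(i).getString("url"));
        }
        return urls;
    }

    @Override
    protected void downloadURL(URL url, int index) {
        addURLToDownload(url, getPrefix(index));
    }
}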
src/main/java/com/rarchives/ripme/ripper/AbstractRipper.java

@ -1,7 +1,11 @@
package com.rarchives.ripme.ripper;
import java.awt.Desktop;
import java.io.*;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.net.MalformedURLException;
import java.net.URL;
@ -9,21 +13,17 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Observable;
import com.rarchives.ripme.App;
import java.util.Scanner;
import org.apache.log4j.FileAppender;
import org.apache.log4j.Logger;
import org.jsoup.HttpStatusException;
import com.rarchives.ripme.App;
import com.rarchives.ripme.ui.RipStatusComplete;
import com.rarchives.ripme.ui.RipStatusHandler;
import com.rarchives.ripme.ui.RipStatusMessage;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Utils;
import java.io.File;
import java.util.Scanner;
public abstract class AbstractRipper
extends Observable
implements RipperInterface, Runnable {
@ -548,7 +548,7 @@ public abstract class AbstractRipper
public static AbstractRipper getRipper(URL url) throws Exception {
for (Constructor<?> constructor : getRipperConstructors("com.rarchives.ripme.ripper.rippers")) {
try {
AlbumRipper ripper = (AlbumRipper) constructor.newInstance(url); // by design: can throw ClassCastException
AbstractRipper ripper = (AbstractRipper) constructor.newInstance(url); // by design: can throw ClassCastException
LOGGER.debug("Found album ripper: " + ripper.getClass().getName());
return ripper;
} catch (Exception e) {

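Relaxing the cast from AlbumRipper to AbstractRipper is what lets the rippers converted in this commit, which now extend AbstractHTMLRipper or AbstractJSONRipper directly, still be discovered by this factory. A minimal usage sketch, assuming the existing setup() helper that prepares the working directory (the URL is illustrative):

import java.net.URL;

import com.rarchives.ripme.ripper.AbstractRipper;

public class RipOneAlbum {
    public static void main(String[] args) throws Exception {
        URL url = new URL("https://myreadingmanga.info/some-title/"); // illustrative URL
        // Walks the constructors in com.rarchives.ripme.ripper.rippers via reflection
        // and returns the first ripper whose constructor accepts this URL.
        AbstractRipper ripper = AbstractRipper.getRipper(url);
        ripper.setup();               // assumed existing helper; creates the working directory
        new Thread(ripper).start();   // AbstractRipper implements Runnable, so it rips on its own thread
    }
}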
src/main/java/com/rarchives/ripme/ripper/DownloadFileThread.java

@ -20,17 +20,14 @@ import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Utils;
/**
* Thread for downloading files.
* Includes retry logic, observer notifications, and other goodies.
* Thread for downloading files. Includes retry logic, observer notifications,
* and other goodies.
*/
class DownloadFileThread extends Thread {
private ResourceBundle rb = MainWindow.rb;
private static final Logger logger = Logger.getLogger(DownloadFileThread.class);
private String referrer = "";
private Map<String,String> cookies = new HashMap<>();
private Map<String, String> cookies = new HashMap<>();
private URL url;
private File saveAs;
@ -55,18 +52,19 @@ class DownloadFileThread extends Thread {
public void setReferrer(String referrer) {
this.referrer = referrer;
}
public void setCookies(Map<String,String> cookies) {
public void setCookies(Map<String, String> cookies) {
this.cookies = cookies;
}
/**
* Attempts to download the file. Retries as needed.
* Notifies observers upon completion/error/warn.
* Attempts to download the file. Retries as needed. Notifies observers upon
* completion/error/warn.
*/
public void run() {
// First thing we make sure the file name doesn't have any illegal chars in it
saveAs = new File(saveAs.getParentFile().getAbsolutePath() + File.separator + Utils.sanitizeSaveAs(saveAs.getName()));
saveAs = new File(
saveAs.getParentFile().getAbsolutePath() + File.separator + Utils.sanitizeSaveAs(saveAs.getName()));
long fileSize = 0;
int bytesTotal = 0;
int bytesDownloaded = 0;
@ -76,16 +74,18 @@ class DownloadFileThread extends Thread {
try {
observer.stopCheck();
} catch (IOException e) {
observer.downloadErrored(url, rb.getString("download.interrupted"));
observer.downloadErrored(url, Utils.getLocalizedString("download.interrupted"));
return;
}
if (saveAs.exists() && !observer.tryResumeDownload() && !getFileExtFromMIME ||
Utils.fuzzyExists(new File(saveAs.getParent()), saveAs.getName()) && getFileExtFromMIME && !observer.tryResumeDownload()) {
if (saveAs.exists() && !observer.tryResumeDownload() && !getFileExtFromMIME
|| Utils.fuzzyExists(new File(saveAs.getParent()), saveAs.getName()) && getFileExtFromMIME
&& !observer.tryResumeDownload()) {
if (Utils.getConfigBoolean("file.overwrite", false)) {
logger.info("[!] " + rb.getString("deleting.existing.file") + prettySaveAs);
logger.info("[!] " + Utils.getLocalizedString("deleting.existing.file") + prettySaveAs);
saveAs.delete();
} else {
logger.info("[!] " + rb.getString("skipping") + url + " -- " + rb.getString("file.already.exists") + ": " + prettySaveAs);
logger.info("[!] " + Utils.getLocalizedString("skipping") + url + " -- "
+ Utils.getLocalizedString("file.already.exists") + ": " + prettySaveAs);
observer.downloadExists(url, saveAs);
return;
}
@ -95,7 +95,8 @@ class DownloadFileThread extends Thread {
int tries = 0; // Number of attempts to download
do {
tries += 1;
InputStream bis = null; OutputStream fos = null;
InputStream bis = null;
OutputStream fos = null;
try {
logger.info(" Downloading file: " + urlToDownload + (tries > 0 ? " Retry #" + tries : ""));
observer.sendUpdate(STATUS.DOWNLOAD_STARTED, url.toExternalForm());
@ -104,16 +105,16 @@ class DownloadFileThread extends Thread {
HttpURLConnection huc;
if (this.url.toString().startsWith("https")) {
huc = (HttpsURLConnection) urlToDownload.openConnection();
}
else {
} else {
huc = (HttpURLConnection) urlToDownload.openConnection();
}
huc.setInstanceFollowRedirects(true);
// It is important to set both ConnectTimeout and ReadTimeout. If you don't then ripme will wait forever
// It is important to set both ConnectTimeout and ReadTimeout. If you don't then
// ripme will wait forever
// for the server to send data after connecting.
huc.setConnectTimeout(TIMEOUT);
huc.setReadTimeout(TIMEOUT);
huc.setRequestProperty("accept", "*/*");
huc.setRequestProperty("accept", "*/*");
if (!referrer.equals("")) {
huc.setRequestProperty("Referer", referrer); // Sic
}
@ -131,17 +132,18 @@ class DownloadFileThread extends Thread {
huc.setRequestProperty("Range", "bytes=" + fileSize + "-");
}
}
logger.debug(rb.getString("request.properties") + ": " + huc.getRequestProperties());
logger.debug(Utils.getLocalizedString("request.properties") + ": " + huc.getRequestProperties());
huc.connect();
int statusCode = huc.getResponseCode();
logger.debug("Status code: " + statusCode);
// If the server doesn't allow resuming downloads error out
if (statusCode != 206 && observer.tryResumeDownload() && saveAs.exists()) {
// TODO find a better way to handle servers that don't support resuming downloads then just erroring out
throw new IOException(rb.getString("server.doesnt.support.resuming.downloads"));
// TODO find a better way to handle servers that don't support resuming
// downloads than just erroring out
throw new IOException(Utils.getLocalizedString("server.doesnt.support.resuming.downloads"));
}
if (statusCode / 100 == 3) { // 3xx Redirect
if (statusCode / 100 == 3) { // 3xx Redirect
if (!redirected) {
// Don't increment retries on the first redirect
tries--;
@ -153,14 +155,17 @@ class DownloadFileThread extends Thread {
throw new IOException("Redirect status code " + statusCode + " - redirect to " + location);
}
if (statusCode / 100 == 4) { // 4xx errors
logger.error("[!] " + rb.getString("nonretriable.status.code") + " " + statusCode + " while downloading from " + url);
observer.downloadErrored(url, rb.getString("nonretriable.status.code") + " " + statusCode + " while downloading " + url.toExternalForm());
logger.error("[!] " + Utils.getLocalizedString("nonretriable.status.code") + " " + statusCode
+ " while downloading from " + url);
observer.downloadErrored(url, Utils.getLocalizedString("nonretriable.status.code") + " "
+ statusCode + " while downloading " + url.toExternalForm());
return; // Not retriable, drop out.
}
if (statusCode / 100 == 5) { // 5xx errors
observer.downloadErrored(url, rb.getString("retriable.status.code") + " " + statusCode + " while downloading " + url.toExternalForm());
observer.downloadErrored(url, Utils.getLocalizedString("retriable.status.code") + " " + statusCode
+ " while downloading " + url.toExternalForm());
// Throw exception so download can be retried
throw new IOException(rb.getString("retriable.status.code") + " " + statusCode);
throw new IOException(Utils.getLocalizedString("retriable.status.code") + " " + statusCode);
}
if (huc.getContentLength() == 503 && urlToDownload.getHost().endsWith("imgur.com")) {
// Imgur image with 503 bytes is "404"
@ -169,7 +174,8 @@ class DownloadFileThread extends Thread {
return;
}
// If the ripper is using the bytes progress bar set bytesTotal to huc.getContentLength()
// If the ripper is using the bytes progress bar set bytesTotal to
// huc.getContentLength()
if (observer.useByteProgessBar()) {
bytesTotal = huc.getContentLength();
observer.setBytesTotal(bytesTotal);
@ -190,14 +196,15 @@ class DownloadFileThread extends Thread {
logger.error("Was unable to get content type from stream");
// Try to get the file type from the magic number
byte[] magicBytes = new byte[8];
bis.read(magicBytes,0, 5);
bis.read(magicBytes, 0, 5);
bis.reset();
fileExt = Utils.getEXTFromMagic(magicBytes);
if (fileExt != null) {
saveAs = new File(saveAs.toString() + "." + fileExt);
} else {
logger.error(rb.getString("was.unable.to.get.content.type.using.magic.number"));
logger.error(rb.getString("magic.number.was") + ": " + Arrays.toString(magicBytes));
logger.error(Utils.getLocalizedString("was.unable.to.get.content.type.using.magic.number"));
logger.error(
Utils.getLocalizedString("magic.number.was") + ": " + Arrays.toString(magicBytes));
}
}
}
@ -210,21 +217,26 @@ class DownloadFileThread extends Thread {
} catch (FileNotFoundException e) {
// We do this because some filesystems have a max name length
if (e.getMessage().contains("File name too long")) {
logger.error("The filename " + saveAs.getName() + " is too long to be saved on this file system.");
logger.error("The filename " + saveAs.getName()
+ " is too long to be saved on this file system.");
logger.info("Shortening filename");
String[] saveAsSplit = saveAs.getName().split("\\.");
// Get the file extension so when we shorten the file name we don't cut off the file extension
// Get the file extension so when we shorten the file name we don't cut off the
// file extension
String fileExt = saveAsSplit[saveAsSplit.length - 1];
// The max limit for filenames on Linux with Ext3/4 is 255 bytes
logger.info(saveAs.getName().substring(0, 254 - fileExt.length()) + fileExt);
String filename = saveAs.getName().substring(0, 254 - fileExt.length()) + "." + fileExt;
// We can't just use the new file name as the saveAs because the file name doesn't include the
// We can't just use the new file name as the saveAs because the file name
// doesn't include the
// users save path, so we get the user save path from the old saveAs
saveAs = new File(saveAs.getParentFile().getAbsolutePath() + File.separator + filename);
fos = new FileOutputStream(saveAs);
} else if (saveAs.getAbsolutePath().length() > 259 && Utils.isWindows()) {
// This if is for when the file path has gone above 260 chars which windows does not allow
fos = new FileOutputStream(Utils.shortenSaveAsWindows(saveAs.getParentFile().getPath(), saveAs.getName()));
// This if is for when the file path has gone above 260 chars which windows does
// not allow
fos = new FileOutputStream(
Utils.shortenSaveAsWindows(saveAs.getParentFile().getPath(), saveAs.getName()));
}
}
}
@ -239,7 +251,7 @@ class DownloadFileThread extends Thread {
try {
observer.stopCheck();
} catch (IOException e) {
observer.downloadErrored(url, rb.getString("download.interrupted"));
observer.downloadErrored(url, Utils.getLocalizedString("download.interrupted"));
return;
}
fos.write(data, 0, bytesRead);
@ -259,27 +271,37 @@ class DownloadFileThread extends Thread {
// Download failed, break out of loop
break;
} catch (HttpStatusException hse) {
logger.debug(rb.getString("http.status.exception"), hse);
logger.debug(Utils.getLocalizedString("http.status.exception"), hse);
logger.error("[!] HTTP status " + hse.getStatusCode() + " while downloading from " + urlToDownload);
if (hse.getStatusCode() == 404 && Utils.getConfigBoolean("errors.skip404", false)) {
observer.downloadErrored(url, "HTTP status code " + hse.getStatusCode() + " while downloading " + url.toExternalForm());
observer.downloadErrored(url,
"HTTP status code " + hse.getStatusCode() + " while downloading " + url.toExternalForm());
return;
}
} catch (IOException e) {
logger.debug("IOException", e);
logger.error("[!] " + rb.getString("exception.while.downloading.file") + ": " + url + " - " + e.getMessage());
logger.error("[!] " + Utils.getLocalizedString("exception.while.downloading.file") + ": " + url + " - "
+ e.getMessage());
} finally {
// Close any open streams
try {
if (bis != null) { bis.close(); }
} catch (IOException e) { }
if (bis != null) {
bis.close();
}
} catch (IOException e) {
}
try {
if (fos != null) { fos.close(); }
} catch (IOException e) { }
if (fos != null) {
fos.close();
}
} catch (IOException e) {
}
}
if (tries > this.retries) {
logger.error("[!] " + rb.getString ("exceeded.maximum.retries") + " (" + this.retries + ") for URL " + url);
observer.downloadErrored(url, rb.getString("failed.to.download") + " " + url.toExternalForm());
logger.error("[!] " + Utils.getLocalizedString("exceeded.maximum.retries") + " (" + this.retries
+ ") for URL " + url);
observer.downloadErrored(url,
Utils.getLocalizedString("failed.to.download") + " " + url.toExternalForm());
return;
}
} while (true);

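As a standalone illustration of the resume handshake this thread performs (URL and sizes are made up): request a byte range and insist on HTTP 206, since a plain 200 means the server ignored the Range header, and set both timeouts so a stalled server cannot hang the download thread.

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

public class ResumeCheck {
    public static void main(String[] args) throws IOException {
        long alreadyDownloaded = 1024; // e.g. size of the partial file already on disk
        URL url = new URL("https://example.com/file.bin");
        HttpURLConnection huc = (HttpURLConnection) url.openConnection();
        huc.setConnectTimeout(5000); // connect AND read timeouts, as the comment above stresses
        huc.setReadTimeout(5000);
        huc.setRequestProperty("Range", "bytes=" + alreadyDownloaded + "-");
        huc.connect();
        if (huc.getResponseCode() != 206) {
            // Server ignored the Range header; a full download (or an error) is the only option.
            throw new IOException("Server does not support resuming downloads");
        }
        // ...append huc.getInputStream() to the existing partial file from here...
        huc.disconnect();
    }
}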
src/main/java/com/rarchives/ripme/ripper/rippers/MotherlessRipper.java

@ -3,19 +3,21 @@ package com.rarchives.ripme.ripper.rippers;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import com.rarchives.ripme.ripper.AlbumRipper;
import com.rarchives.ripme.ripper.AbstractHTMLRipper;
import com.rarchives.ripme.ripper.DownloadThreadPool;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Http;
import com.rarchives.ripme.utils.Utils;
public class MotherlessRipper extends AlbumRipper {
public class MotherlessRipper extends AbstractHTMLRipper {
private static final String DOMAIN = "motherless.com",
HOST = "motherless";
@ -37,6 +39,52 @@ public class MotherlessRipper extends AlbumRipper {
return url.getHost().endsWith(DOMAIN);
}
@Override
protected String getDomain() {
return DOMAIN;
}
@Override
protected Document getFirstPage() throws IOException {
return Http.url(url).referrer("http://motherless.com").get();
}
@Override
protected List<String> getURLsFromPage(Document page) {
List<String> pageURLs = new ArrayList<>();
for (Element thumb : page.select("div.thumb a.img-container")) {
if (isStopped()) {
break;
}
String thumbURL = thumb.attr("href");
if (thumbURL.contains("pornmd.com")) {
continue;
}
String url;
if (!thumbURL.startsWith("http")) {
url = "http://" + DOMAIN + thumbURL;
} else {
url = thumbURL;
}
pageURLs.add(url);
if (isThisATest()) {
break;
}
}
return pageURLs;
}
@Override
protected void downloadURL(URL url, int index) {
// Create thread for finding image at "url" page
MotherlessImageThread mit = new MotherlessImageThread(url, index);
motherlessThreadPool.addThread(mit);
}
@Override
public String getHost() {
return HOST;
@ -77,34 +125,14 @@ public class MotherlessRipper extends AlbumRipper {
}
LOGGER.info("Retrieving " + nextURL);
sendUpdate(STATUS.LOADING_RESOURCE, nextURL);
Document doc = Http.url(nextURL)
.referrer("http://motherless.com")
.get();
for (Element thumb : doc.select("div.thumb a.img-container")) {
if (isStopped()) {
break;
}
String thumbURL = thumb.attr("href");
if (thumbURL.contains("pornmd.com")) {
continue;
}
URL url;
if (!thumbURL.startsWith("http")) {
url = new URL("http://" + DOMAIN + thumbURL);
}
else {
url = new URL(thumbURL);
}
index += 1;
Document doc = getFirstPage();
List<String> URLs = getURLsFromPage(doc);
// Create thread for finding image at "url" page
MotherlessImageThread mit = new MotherlessImageThread(url, index);
motherlessThreadPool.addThread(mit);
if (isThisATest()) {
break;
}
for (String url: URLs) {
downloadURL(new URL(url), index);
index ++;
}
if (isThisATest()) {
break;
}

src/main/java/com/rarchives/ripme/ripper/rippers/MyreadingmangaRipper.java (new file)

@ -0,0 +1,65 @@
package com.rarchives.ripme.ripper.rippers;
import com.rarchives.ripme.ripper.AbstractHTMLRipper;
import com.rarchives.ripme.utils.Http;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
public class MyreadingmangaRipper extends AbstractHTMLRipper {
public MyreadingmangaRipper(URL url) throws IOException {
super(url);
}
@Override
public String getHost() {
return "myreadingmanga";
}
@Override
public String getDomain() {
return "myreadingmanga.info";
}
@Override
public String getGID(URL url) throws MalformedURLException {
Pattern p = Pattern.compile("https://myreadingmanga.info/([a-zA-Z_\\-0-9]+)/?$");
Matcher m = p.matcher(url.toExternalForm());
if (m.matches()) {
return m.group(1);
}
throw new MalformedURLException("Expected myreadingmanga.info URL format: "
+ "myreadingmanga.info/title - got " + url + " instead");
}
@Override
public Document getFirstPage() throws IOException {
// "url" is an instance field of the superclass
return Http.url(url).get();
}
@Override
public List<String> getURLsFromPage(Document doc) {
List<String> result = new ArrayList<>();
for (Element el : doc.select("div.separator > img")) {
String imageSource = el.attr("data-lazy-src");
result.add(imageSource);
}
return result;
}
@Override
public void downloadURL(URL url, int index) {
addURLToDownload(url, getPrefix(index));
}
}

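A quick check of the URL pattern this new ripper accepts (the title in the example is made up):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class MyreadingmangaGidDemo {
    public static void main(String[] args) {
        Pattern p = Pattern.compile("https://myreadingmanga.info/([a-zA-Z_\\-0-9]+)/?$");
        Matcher m = p.matcher("https://myreadingmanga.info/some-title/");
        if (m.matches()) {
            System.out.println(m.group(1)); // prints "some-title"
        }
        // URLs with extra path segments do not match and would hit the
        // MalformedURLException branch in getGID above.
    }
}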
src/main/java/com/rarchives/ripme/ripper/rippers/NewsfilterRipper.java

@ -10,14 +10,10 @@ import java.util.regex.Pattern;
import com.rarchives.ripme.ripper.AbstractHTMLRipper;
import com.rarchives.ripme.utils.Http;
import org.jsoup.Connection;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import com.rarchives.ripme.ripper.AlbumRipper;
public class NewsfilterRipper extends AbstractHTMLRipper {
private static final String HOST = "newsfilter";

src/main/java/com/rarchives/ripme/ripper/rippers/NfsfwRipper.java

@ -13,11 +13,8 @@ import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import com.rarchives.ripme.ripper.AlbumRipper;
import com.rarchives.ripme.ripper.DownloadThreadPool;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Http;
import com.rarchives.ripme.utils.Utils;
public class NfsfwRipper extends AbstractHTMLRipper {


@ -14,9 +14,7 @@ import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import com.rarchives.ripme.ripper.AlbumRipper;
import com.rarchives.ripme.ripper.DownloadThreadPool;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Http;
import com.rarchives.ripme.utils.Utils;

src/main/java/com/rarchives/ripme/ripper/rippers/TwitterRipper.java

@ -22,8 +22,7 @@ public class TwitterRipper extends AlbumRipper {
int downloadUrls = 1;
private static final String DOMAIN = "twitter.com",
HOST = "twitter";
private static final String DOMAIN = "twitter.com", HOST = "twitter";
private static final int MAX_REQUESTS = Utils.getConfigInteger("twitter.max_requests", 10);
private static final boolean RIP_RETWEETS = Utils.getConfigBoolean("twitter.rip_retweets", true);
@ -34,8 +33,7 @@ public class TwitterRipper extends AlbumRipper {
private String accessToken;
private enum ALBUM_TYPE {
ACCOUNT,
SEARCH
ACCOUNT, SEARCH
}
private ALBUM_TYPE albumType;
@ -75,13 +73,10 @@ public class TwitterRipper extends AlbumRipper {
}
private void getAccessToken() throws IOException {
Document doc = Http.url("https://api.twitter.com/oauth2/token")
.ignoreContentType()
Document doc = Http.url("https://api.twitter.com/oauth2/token").ignoreContentType()
.header("Authorization", "Basic " + authKey)
.header("Content-Type", "application/x-www-form-urlencoded;charset=UTF-8")
.header("User-agent", "ripe and zipe")
.data("grant_type", "client_credentials")
.post();
.header("User-agent", "ripe and zipe").data("grant_type", "client_credentials").post();
String body = doc.body().html().replaceAll("&quot;", "\"");
try {
JSONObject json = new JSONObject(body);
@ -94,17 +89,13 @@ public class TwitterRipper extends AlbumRipper {
private void checkRateLimits(String resource, String api) throws IOException {
Document doc = Http.url("https://api.twitter.com/1.1/application/rate_limit_status.json?resources=" + resource)
.ignoreContentType()
.header("Authorization", "Bearer " + accessToken)
.ignoreContentType().header("Authorization", "Bearer " + accessToken)
.header("Content-Type", "application/x-www-form-urlencoded;charset=UTF-8")
.header("User-agent", "ripe and zipe")
.get();
.header("User-agent", "ripe and zipe").get();
String body = doc.body().html().replaceAll("&quot;", "\"");
try {
JSONObject json = new JSONObject(body);
JSONObject stats = json.getJSONObject("resources")
.getJSONObject(resource)
.getJSONObject(api);
JSONObject stats = json.getJSONObject("resources").getJSONObject(resource).getJSONObject(api);
int remaining = stats.getInt("remaining");
LOGGER.info(" Twitter " + resource + " calls remaining: " + remaining);
if (remaining < 20) {
@ -120,23 +111,17 @@ public class TwitterRipper extends AlbumRipper {
private String getApiURL(Long maxID) {
StringBuilder req = new StringBuilder();
switch (albumType) {
case ACCOUNT:
req.append("https://api.twitter.com/1.1/statuses/user_timeline.json")
.append("?screen_name=" + this.accountName)
.append("&include_entities=true")
.append("&exclude_replies=true")
.append("&trim_user=true")
.append("&count=" + 200)
.append("&tweet_mode=extended");
break;
case SEARCH:
req.append("https://api.twitter.com/1.1/search/tweets.json")
.append("?q=" + this.searchText)
.append("&include_entities=true")
.append("&result_type=recent")
.append("&count=100")
.append("&tweet_mode=extended");
break;
case ACCOUNT:
req.append("https://api.twitter.com/1.1/statuses/user_timeline.json")
.append("?screen_name=" + this.accountName).append("&include_entities=true")
.append("&exclude_replies=true").append("&trim_user=true").append("&count=" + 200)
.append("&tweet_mode=extended");
break;
case SEARCH:
req.append("https://api.twitter.com/1.1/search/tweets.json").append("?q=" + this.searchText)
.append("&include_entities=true").append("&result_type=recent").append("&count=100")
.append("&tweet_mode=extended");
break;
}
if (maxID > 0) {
req.append("&max_id=" + Long.toString(maxID));
@ -147,12 +132,9 @@ public class TwitterRipper extends AlbumRipper {
private List<JSONObject> getTweets(String url) throws IOException {
List<JSONObject> tweets = new ArrayList<>();
LOGGER.info(" Retrieving " + url);
Document doc = Http.url(url)
.ignoreContentType()
.header("Authorization", "Bearer " + accessToken)
Document doc = Http.url(url).ignoreContentType().header("Authorization", "Bearer " + accessToken)
.header("Content-Type", "application/x-www-form-urlencoded;charset=UTF-8")
.header("User-agent", "ripe and zipe")
.get();
.header("User-agent", "ripe and zipe").get();
String body = doc.body().html().replaceAll("&quot;", "\"");
Object jsonObj = new JSONTokener(body).nextValue();
JSONArray statuses;
@ -178,7 +160,7 @@ public class TwitterRipper extends AlbumRipper {
LOGGER.error("XXX Tweet doesn't have entities");
return 0;
}
if (!RIP_RETWEETS && tweet.has("retweeted_status")) {
LOGGER.info("Skipping a retweet as twitter.rip_retweet is set to false.");
return 0;
@ -194,19 +176,22 @@ public class TwitterRipper extends AlbumRipper {
for (int i = 0; i < medias.length(); i++) {
media = (JSONObject) medias.get(i);
url = media.getString("media_url");
if (media.getString("type").equals("video")) {
if (media.getString("type").equals("video") || media.getString("type").equals("animated_gif")) {
JSONArray variants = media.getJSONObject("video_info").getJSONArray("variants");
int largestBitrate = 0;
String urlToDownload = null;
// Loop over all the video options and find the biggest video
for (int j = 0; j < medias.length(); j++) {
JSONObject variant = (JSONObject) variants.get(i);
for (int j = 0; j < variants.length(); j++) {
JSONObject variant = (JSONObject) variants.get(j);
LOGGER.info(variant);
// If the video doesn't have a bitrate it's a m3u8 file we can't download
if (variant.has("bitrate")) {
if (variant.getInt("bitrate") > largestBitrate) {
largestBitrate = variant.getInt("bitrate");
urlToDownload = variant.getString("url");
} else if (media.getString("type").equals("animated_gif")) {
// If the type is animated_gif, the bitrate doesn't matter
urlToDownload = variant.getString("url");
}
}
}
@ -230,12 +215,11 @@ public class TwitterRipper extends AlbumRipper {
}
}
return parsedCount;
}
public String getPrefix(int index) {
return String.format("%03d_", index);
return Utils.getConfigBoolean("download.save_order", true) ? String.format("%03d_", index) : "";
}
@Override
@ -243,12 +227,12 @@ public class TwitterRipper extends AlbumRipper {
getAccessToken();
switch (albumType) {
case ACCOUNT:
checkRateLimits("statuses", "/statuses/user_timeline");
break;
case SEARCH:
checkRateLimits("search", "/search/tweets");
break;
case ACCOUNT:
checkRateLimits("statuses", "/statuses/user_timeline");
break;
case SEARCH:
checkRateLimits("search", "/search/tweets");
break;
}
Long lastMaxID = 0L;
@ -260,9 +244,7 @@ public class TwitterRipper extends AlbumRipper {
break;
}
LOGGER.debug("Twitter response #" + (i + 1) + " Tweets:\n" + tweets);
if (tweets.size() == 1 &&
lastMaxID.equals(tweets.get(0).getString("id_str"))
) {
if (tweets.size() == 1 && lastMaxID.equals(tweets.get(0).getString("id_str"))) {
LOGGER.info(" No more tweet found.");
break;
}
@ -299,26 +281,22 @@ public class TwitterRipper extends AlbumRipper {
@Override
public String getGID(URL url) throws MalformedURLException {
switch (albumType) {
case ACCOUNT:
return "account_" + accountName;
case SEARCH:
StringBuilder gid = new StringBuilder();
for (int i = 0; i < searchText.length(); i++) {
char c = searchText.charAt(i);
// Ignore URL-encoded chars
if (c == '%') {
gid.append('_');
i += 2;
// Ignore non-alphanumeric chars
} else if (
(c >= 'a' && c <= 'z')
|| (c >= 'A' && c <= 'Z')
|| (c >= '0' && c <= '9')
) {
gid.append(c);
}
case ACCOUNT:
return "account_" + accountName;
case SEARCH:
StringBuilder gid = new StringBuilder();
for (int i = 0; i < searchText.length(); i++) {
char c = searchText.charAt(i);
// Ignore URL-encoded chars
if (c == '%') {
gid.append('_');
i += 2;
// Ignore non-alphanumeric chars
} else if ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9')) {
gid.append(c);
}
return "search_" + gid.toString();
}
return "search_" + gid.toString();
}
throw new MalformedURLException("Could not decide type of URL (search/account): " + url);
}

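The corrected inner loop above iterates video_info.variants (previously it looped over medias and always indexed variants with the outer index i), keeps the highest-bitrate variant, and skips m3u8 playlists, which carry no "bitrate" field. Twitter reports animated_gif variants with a bitrate of 0, which is why the original comparison needed a special case; a simplified, hypothetical helper with the same intent can sidestep it by starting below zero:

import org.json.JSONArray;
import org.json.JSONObject;

public class VariantPicker {
    // Returns the URL of the highest-bitrate downloadable variant, or null if
    // the media only offers m3u8 playlists (no "bitrate" field).
    static String pickLargestVariant(JSONObject media) {
        JSONArray variants = media.getJSONObject("video_info").getJSONArray("variants");
        int largestBitrate = -1; // below 0 so an animated_gif variant (bitrate 0) is still accepted
        String urlToDownload = null;
        for (int j = 0; j < variants.length(); j++) {
            JSONObject variant = variants.getJSONObject(j);
            if (variant.has("bitrate") && variant.getInt("bitrate") > largestBitrate) {
                largestBitrate = variant.getInt("bitrate");
                urlToDownload = variant.getString("url");
            }
        }
        return urlToDownload;
    }
}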
src/main/java/com/rarchives/ripme/ripper/rippers/VkRipper.java

@ -3,33 +3,171 @@ package com.rarchives.ripme.ripper.rippers;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.rarchives.ripme.ripper.AbstractJSONRipper;
import org.json.JSONArray;
import org.json.JSONObject;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import com.rarchives.ripme.ripper.AlbumRipper;
import com.rarchives.ripme.utils.Http;
import com.rarchives.ripme.utils.Utils;
public class VkRipper extends AlbumRipper {
public class VkRipper extends AbstractJSONRipper {
private static final String DOMAIN = "vk.com",
HOST = "vk";
enum RipType { VIDEO, IMAGE }
private RipType RIP_TYPE;
private String oid;
public VkRipper(URL url) throws IOException {
super(url);
}
@Override
public String getHost() {
return HOST;
}
@Override
protected String getDomain() {
return DOMAIN;
}
@Override
protected JSONObject getFirstPage() throws IOException {
if (RIP_TYPE == RipType.VIDEO) {
oid = getGID(this.url).replace("videos", "");
String u = "http://vk.com/al_video.php";
Map<String, String> postData = new HashMap<>();
postData.put("al", "1");
postData.put("act", "load_videos_silent");
postData.put("offset", "0");
postData.put("oid", oid);
Document doc = Http.url(u)
.referrer(this.url)
.ignoreContentType()
.data(postData)
.post();
String[] jsonStrings = doc.toString().split("<!>");
return new JSONObject(jsonStrings[jsonStrings.length - 1]);
} else {
Map<String,String> photoIDsToURLs = new HashMap<>();
int offset = 0;
while (true) {
LOGGER.info(" Retrieving " + this.url);
Map<String,String> postData = new HashMap<>();
postData.put("al", "1");
postData.put("offset", Integer.toString(offset));
postData.put("part", "1");
Document doc = Http.url(this.url)
.referrer(this.url)
.ignoreContentType()
.data(postData)
.post();
String body = doc.toString();
if (!body.contains("<div")) {
break;
}
body = body.substring(body.indexOf("<div"));
doc = Jsoup.parseBodyFragment(body);
List<Element> elements = doc.select("a");
Set<String> photoIDsToGet = new HashSet<>();
for (Element a : elements) {
if (!a.attr("onclick").contains("showPhoto('")) {
LOGGER.error("a: " + a);
continue;
}
String photoID = a.attr("onclick");
photoID = photoID.substring(photoID.indexOf("showPhoto('") + "showPhoto('".length());
photoID = photoID.substring(0, photoID.indexOf("'"));
if (!photoIDsToGet.contains(photoID)) {
photoIDsToGet.add(photoID);
}
}
for (String photoID : photoIDsToGet) {
if (!photoIDsToURLs.containsKey(photoID)) {
try {
photoIDsToURLs.putAll(getPhotoIDsToURLs(photoID));
} catch (IOException e) {
LOGGER.error("Exception while retrieving photo id " + photoID, e);
continue;
}
}
if (!photoIDsToURLs.containsKey(photoID)) {
LOGGER.error("Could not find URL for photo ID: " + photoID);
continue;
}
if (isStopped() || isThisATest()) {
break;
}
}
if (elements.size() < 40 || isStopped() || isThisATest()) {
break;
}
offset += elements.size();
}
// Slight hack to make this into effectively a JSON ripper
return new JSONObject(photoIDsToURLs);
}
}
@Override
protected List<String> getURLsFromJSON(JSONObject page) {
List<String> pageURLs = new ArrayList<>();
if (RIP_TYPE == RipType.VIDEO) {
JSONArray videos = page.getJSONArray("all");
LOGGER.info("Found " + videos.length() + " videos");
for (int i = 0; i < videos.length(); i++) {
JSONArray jsonVideo = videos.getJSONArray(i);
int vidid = jsonVideo.getInt(1);
String videoURL;
try {
videoURL = com.rarchives.ripme.ripper.rippers.video.VkRipper.getVideoURLAtPage(
"http://vk.com/video" + oid + "_" + vidid);
} catch (IOException e) {
LOGGER.error("Error while ripping video id: " + vidid);
return pageURLs;
}
pageURLs.add(videoURL);
}
} else {
Iterator keys = page.keys();
while (keys.hasNext()) {
pageURLs.add(page.getString((String) keys.next()));
}
}
return pageURLs;
}
@Override
protected void downloadURL(URL url, int index) {
if (RIP_TYPE == RipType.VIDEO) {
String prefix = "";
if (Utils.getConfigBoolean("download.save_order", true)) {
prefix = String.format("%03d_", index + 1);
}
addURLToDownload(url, prefix);
try {
Thread.sleep(500);
} catch (InterruptedException e) {
LOGGER.error("Interrupted while waiting to fetch next video URL", e);
}
} else {
addURLToDownload(url);
}
}
@Override
public boolean canRip(URL url) {
if (!url.getHost().endsWith(DOMAIN)) {
@ -48,115 +186,19 @@ public class VkRipper extends AlbumRipper {
@Override
public void rip() throws IOException {
if (this.url.toExternalForm().contains("/videos")) {
ripVideos();
RIP_TYPE = RipType.VIDEO;
JSONObject json = getFirstPage();
List<String> URLs = getURLsFromJSON(json);
for (int index = 0; index < URLs.size(); index ++) {
downloadURL(new URL(URLs.get(index)), index);
}
waitForThreads();
}
else {
ripImages();
RIP_TYPE = RipType.IMAGE;
}
}
private void ripVideos() throws IOException {
String oid = getGID(this.url).replace("videos", "");
String u = "http://vk.com/al_video.php";
Map<String,String> postData = new HashMap<>();
postData.put("al", "1");
postData.put("act", "load_videos_silent");
postData.put("offset", "0");
postData.put("oid", oid);
Document doc = Http.url(u)
.referrer(this.url)
.ignoreContentType()
.data(postData)
.post();
String[] jsonStrings = doc.toString().split("<!>");
JSONObject json = new JSONObject(jsonStrings[jsonStrings.length - 1]);
JSONArray videos = json.getJSONArray("all");
LOGGER.info("Found " + videos.length() + " videos");
for (int i = 0; i < videos.length(); i++) {
JSONArray jsonVideo = videos.getJSONArray(i);
int vidid = jsonVideo.getInt(1);
String videoURL = com.rarchives.ripme.ripper.rippers.video.VkRipper.getVideoURLAtPage(
"http://vk.com/video" + oid + "_" + vidid);
String prefix = "";
if (Utils.getConfigBoolean("download.save_order", true)) {
prefix = String.format("%03d_", i + 1);
}
addURLToDownload(new URL(videoURL), prefix);
try {
Thread.sleep(500);
} catch (InterruptedException e) {
LOGGER.error("Interrupted while waiting to fetch next video URL", e);
break;
}
}
waitForThreads();
}
private void ripImages() throws IOException {
Map<String,String> photoIDsToURLs = new HashMap<>();
int offset = 0;
while (true) {
LOGGER.info(" Retrieving " + this.url);
// al=1&offset=80&part=1
Map<String,String> postData = new HashMap<>();
postData.put("al", "1");
postData.put("offset", Integer.toString(offset));
postData.put("part", "1");
Document doc = Http.url(this.url)
.referrer(this.url)
.ignoreContentType()
.data(postData)
.post();
String body = doc.toString();
if (!body.contains("<div")) {
break;
}
body = body.substring(body.indexOf("<div"));
doc = Jsoup.parseBodyFragment(body);
List<Element> elements = doc.select("a");
Set<String> photoIDsToGet = new HashSet<>();
for (Element a : elements) {
if (!a.attr("onclick").contains("showPhoto('")) {
LOGGER.error("a: " + a);
continue;
}
String photoID = a.attr("onclick");
photoID = photoID.substring(photoID.indexOf("showPhoto('") + "showPhoto('".length());
photoID = photoID.substring(0, photoID.indexOf("'"));
if (!photoIDsToGet.contains(photoID)) {
photoIDsToGet.add(photoID);
}
}
for (String photoID : photoIDsToGet) {
if (!photoIDsToURLs.containsKey(photoID)) {
try {
photoIDsToURLs.putAll(getPhotoIDsToURLs(photoID));
} catch (IOException e) {
LOGGER.error("Exception while retrieving photo id " + photoID, e);
continue;
}
}
if (!photoIDsToURLs.containsKey(photoID)) {
LOGGER.error("Could not find URL for photo ID: " + photoID);
continue;
}
String url = photoIDsToURLs.get(photoID);
addURLToDownload(new URL(url));
if (isStopped() || isThisATest()) {
break;
}
}
if (elements.size() < 40 || isStopped() || isThisATest()) {
break;
}
offset += elements.size();
}
waitForThreads();
}
private Map<String,String> getPhotoIDsToURLs(String photoID) throws IOException {
Map<String,String> photoIDsToURLs = new HashMap<>();
Map<String,String> postData = new HashMap<>();
@ -191,11 +233,6 @@ public class VkRipper extends AlbumRipper {
return photoIDsToURLs;
}
@Override
public String getHost() {
return HOST;
}
@Override
public String getGID(URL url) throws MalformedURLException {
Pattern p = Pattern.compile("^https?://(www\\.)?vk\\.com/(photos|album|videos)-?([a-zA-Z0-9_]+).*$");

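The "slight hack" noted above works because org.json can wrap a plain Map: the photo-ID-to-URL map becomes a JSONObject whose keys are the field names, which is exactly what getURLsFromJSON() iterates. A small standalone sketch (IDs and URLs are made up):

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import org.json.JSONObject;

public class MapAsJsonDemo {
    public static void main(String[] args) {
        Map<String, String> photoIDsToURLs = new HashMap<>();
        photoIDsToURLs.put("photo123", "https://example.com/a.jpg");
        photoIDsToURLs.put("photo456", "https://example.com/b.jpg");

        // Same trick as VkRipper.getFirstPage(): map keys become JSON field names.
        JSONObject page = new JSONObject(photoIDsToURLs);

        Iterator keys = page.keys();
        while (keys.hasNext()) {
            System.out.println(page.getString((String) keys.next()));
        }
    }
}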
src/main/java/com/rarchives/ripme/ui/HistoryMenuMouseListener.java

@ -10,13 +10,15 @@ import javax.swing.Action;
import javax.swing.JPopupMenu;
import javax.swing.JTable;
import com.rarchives.ripme.utils.Utils;
class HistoryMenuMouseListener extends MouseAdapter {
private JPopupMenu popup = new JPopupMenu();
private JTable tableComponent;
@SuppressWarnings("serial")
public HistoryMenuMouseListener() {
Action checkAllAction = new AbstractAction("Check All") {
Action checkAllAction = new AbstractAction(Utils.getLocalizedString("history.check.all")) {
@Override
public void actionPerformed(ActionEvent ae) {
for (int row = 0; row < tableComponent.getRowCount(); row++) {
@ -26,7 +28,7 @@ class HistoryMenuMouseListener extends MouseAdapter {
};
popup.add(checkAllAction);
Action uncheckAllAction = new AbstractAction("Check None") {
Action uncheckAllAction = new AbstractAction(Utils.getLocalizedString("history.check.none")) {
@Override
public void actionPerformed(ActionEvent ae) {
for (int row = 0; row < tableComponent.getRowCount(); row++) {
@ -38,7 +40,7 @@ class HistoryMenuMouseListener extends MouseAdapter {
popup.addSeparator();
Action checkSelected = new AbstractAction("Check Selected") {
Action checkSelected = new AbstractAction(Utils.getLocalizedString("history.check.selected")) {
@Override
public void actionPerformed(ActionEvent ae) {
for (int row : tableComponent.getSelectedRows()) {
@ -48,7 +50,7 @@ class HistoryMenuMouseListener extends MouseAdapter {
};
popup.add(checkSelected);
Action uncheckSelected = new AbstractAction("Uncheck Selected") {
Action uncheckSelected = new AbstractAction(Utils.getLocalizedString("history.uncheck.selected")) {
@Override
public void actionPerformed(ActionEvent ae) {
for (int row : tableComponent.getSelectedRows()) {

src/main/java/com/rarchives/ripme/ui/MainWindow.java

@ -72,7 +72,6 @@ public final class MainWindow implements Runnable, RipStatusHandler {
private static JFrame mainFrame;
private static JTextField ripTextfield;
private static JButton ripButton, stopButton;
private static JLabel statusLabel;
private static JButton openButton;
@ -94,8 +93,6 @@ public final class MainWindow implements Runnable, RipStatusHandler {
private static JTable historyTable;
private static AbstractTableModel historyTableModel;
private static JButton historyButtonRemove, historyButtonClear, historyButtonRerip;
// Queue
public static JButton optionQueue;
@ -142,35 +139,17 @@ public final class MainWindow implements Runnable, RipStatusHandler {
private static AbstractRipper ripper;
private void updateQueue(DefaultListModel<Object> model) {
if (model == null)
model = queueListModel;
if (model.size() > 0){
if (model.size() > 0) {
Utils.setConfigList("queue", (Enumeration<Object>) model.elements());
Utils.saveConfig();
}
MainWindow.optionQueue.setText(String.format("%s%s", Utils.getLocalizedString("queue"),
model.size() == 0 ? "" : "(" + model.size() + ")"));
MainWindow.optionQueue.setText(String.format("%s%s", Utils.getLocalizedString("queue"),
model.size() == 0 ? "" : "(" + model.size() + ")"));
}
}
private void updateQueue() {
@ -192,7 +171,6 @@ public final class MainWindow implements Runnable, RipStatusHandler {
return checkbox;
}
public static void addUrlToQueue(String url) {
queueListModel.addElement(url);
}
@ -284,10 +262,8 @@ public final class MainWindow implements Runnable, RipStatusHandler {
private boolean isCollapsed() {
return (!logPanel.isVisible() && !historyPanel.isVisible() && !queuePanel.isVisible()
&& !configurationPanel.isVisible());
}
private void createUI(Container pane) {
@ -510,7 +486,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
queueList.addMouseListener(
queueMenuMouseListener = new QueueMenuMouseListener(d -> updateQueue(queueListModel)));
JScrollPane queueListScroll = new JScrollPane(queueList, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED,
JScrollPane.HORIZONTAL_SCROLLBAR_NEVER);
for (String item : Utils.getConfigList("queue")) {
queueListModel.addElement(item);
@ -595,9 +571,6 @@ public final class MainWindow implements Runnable, RipStatusHandler {
addItemToConfigGridBagConstraints(gbc, 11, configSelectLangComboBox, configUrlFileChooserButton);
addItemToConfigGridBagConstraints(gbc, 12, configSaveDirLabel, configSaveDirButton);
emptyPanel = new JPanel();
emptyPanel.setPreferredSize(new Dimension(0, 0));
emptyPanel.setSize(0, 0);
@ -863,7 +836,6 @@ public final class MainWindow implements Runnable, RipStatusHandler {
saveHistory();
});
} else {
Utils.clearURLHistory();
HISTORY.clear();
try {
@ -878,8 +850,6 @@ public final class MainWindow implements Runnable, RipStatusHandler {
historyButtonRerip.addActionListener(event -> {
if (HISTORY.isEmpty()) {
JOptionPane.showMessageDialog(null, Utils.getLocalizedString("history.load.none"), "RipMe Error",
JOptionPane.ERROR_MESSAGE);
return;
}
@ -892,9 +862,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
}
if (added == 0) {
JOptionPane.showMessageDialog(null, Utils.getLocalizedString("history.load.none.checked"),
"RipMe Error", JOptionPane.ERROR_MESSAGE);
}
});
@ -967,7 +935,6 @@ public final class MainWindow implements Runnable, RipStatusHandler {
}
}
} catch (IOException e) {
LOGGER.error("Error reading file " + e.getMessage());
}
@ -1066,9 +1033,8 @@ public final class MainWindow implements Runnable, RipStatusHandler {
MenuItem trayMenuAbout = new MenuItem("About " + mainFrame.getTitle());
trayMenuAbout.addActionListener(arg0 -> {
StringBuilder about = new StringBuilder();
about.append("<html><h1>").append(mainFrame.getTitle()).append("</h1>");
about.append("Download albums from various websites:");
try {
List<String> rippers = Utils.getListOfAlbumRippers();
@ -1106,10 +1072,6 @@ public final class MainWindow implements Runnable, RipStatusHandler {
about.append("</html>");
int response = JOptionPane.showConfirmDialog(null, about.toString(), mainFrame.getTitle(),
JOptionPane.YES_NO_OPTION, JOptionPane.PLAIN_MESSAGE, new ImageIcon(mainIcon));
if (response == JOptionPane.YES_OPTION) {
try {
Desktop.getDesktop().browse(URI.create("http://github.com/ripmeapp/ripme"));
@ -1158,8 +1120,6 @@ public final class MainWindow implements Runnable, RipStatusHandler {
private void toggleTrayClick() {
if (mainFrame.getExtendedState() == JFrame.ICONIFIED || !mainFrame.isActive() || !mainFrame.isVisible()) {
mainFrame.setVisible(true);
mainFrame.setAlwaysOnTop(true);
mainFrame.setAlwaysOnTop(false);
@ -1212,10 +1172,7 @@ public final class MainWindow implements Runnable, RipStatusHandler {
LOGGER.error("Failed to load history from file " + historyFile, e);
JOptionPane.showMessageDialog(null,
String.format(Utils.getLocalizedString("history.load.failed.warning"), e.getMessage()),
"RipMe - history load failure", JOptionPane.ERROR_MESSAGE);
}
} else {
@ -1254,7 +1211,6 @@ public final class MainWindow implements Runnable, RipStatusHandler {
}
}
private void ripNextAlbum() {
isRipping = true;
// Save current state of queue to configuration.
@ -1323,7 +1279,6 @@ public final class MainWindow implements Runnable, RipStatusHandler {
ripper.setObserver(this);
Thread t = new Thread(ripper);
if (configShowPopup.isSelected() && (!mainFrame.isVisible() || !mainFrame.isActive())) {
mainFrame.toFront();
mainFrame.setAlwaysOnTop(true);
trayIcon.displayMessage(mainFrame.getTitle(), "Started ripping " + ripper.getURL().toExternalForm(),
@ -1498,8 +1453,8 @@ public final class MainWindow implements Runnable, RipStatusHandler {
}
/*
* content key %path% the path to the album folder %url% is the album url
*
*
*/
if (Utils.getConfigBoolean("enable.finish.command", false)) {
try {
@ -1511,10 +1466,8 @@ public final class MainWindow implements Runnable, RipStatusHandler {
// https://stackoverflow.com/questions/5711084/java-runtime-getruntime-getting-output-from-executing-a-command-line-program
Process proc = Runtime.getRuntime().exec(commandToRun);
BufferedReader stdInput = new BufferedReader(new InputStreamReader(proc.getInputStream()));
BufferedReader stdError = new BufferedReader(new InputStreamReader(proc.getErrorStream()));
// read the output from the command
LOGGER.info("Command output:\n");

src/main/java/com/rarchives/ripme/utils/Utils.java

@ -2,18 +2,27 @@ package com.rarchives.ripme.utils;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Constructor;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLDecoder;
import java.nio.ByteBuffer;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
@ -21,6 +30,9 @@ import java.util.MissingResourceException;
import java.util.ResourceBundle;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Stream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Clip;
@ -40,6 +52,8 @@ import org.apache.log4j.PropertyConfigurator;
*/
public class Utils {
private static final Pattern pattern = Pattern.compile("LabelsBundle_(?<lang>[A-Za-z_]+).properties");
private static final String DEFAULT_LANG = "en_US";
private static final String RIP_DIRECTORY = "rips";
private static final String CONFIG_FILE = "rip.properties";
private static final String OS = System.getProperty("os.name").toLowerCase();
@ -50,6 +64,8 @@ public class Utils {
private static HashMap<String, HashMap<String, String>> cookieCache;
private static HashMap<ByteBuffer, String> magicHash = new HashMap<>();
private static ResourceBundle resourceBundle = null;
static {
cookieCache = new HashMap<>();
@ -83,6 +99,8 @@ public class Utils {
} catch (Exception e) {
LOGGER.error("[!] Failed to load properties file from " + CONFIG_FILE, e);
}
resourceBundle = getResourceBundle(null);
}
/**
@ -737,6 +755,52 @@ public class Utils {
}
}
public static void setLanguage(String langSelect) {
resourceBundle = getResourceBundle(langSelect);
}
public static String getSelectedLanguage() {
return resourceBundle.getLocale().toString();
}
// All the langs ripme has been translated into
public static String[] getSupportedLanguages() {
ArrayList<Path> filesList = new ArrayList<>();
try {
URI uri = Utils.class.getResource("/rip.properties").toURI();
Path myPath;
if (uri.getScheme().equals("jar")) {
FileSystem fileSystem = FileSystems.newFileSystem(uri, Collections.<String, Object>emptyMap());
myPath = fileSystem.getPath("/");
} else {
myPath = Paths.get(uri).getParent();
}
Files.walk(myPath, 1).filter(p -> p.toString().contains("LabelsBundle_")).distinct()
.forEach(filesList::add);
String[] langs = new String[filesList.size()];
for (int i = 0; i < filesList.size(); i++) {
Matcher matcher = pattern.matcher(filesList.get(i).toString());
if (matcher.find())
langs[i] = matcher.group("lang");
}
return langs;
} catch (Exception e) {
e.printStackTrace();
// On error return default language
return new String[] { DEFAULT_LANG };
}
}
public static String getLocalizedString(String key) {
LOGGER.debug(String.format("Getting key %s in %s value %s", key, getSelectedLanguage(),
resourceBundle.getString(key)));
return resourceBundle.getString(key);
}
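A minimal usage sketch of the localization helpers added above; the "es_ES" language code is an assumption (any value returned by getSupportedLanguages works), and the printed values depend on the selected bundle:

import com.rarchives.ripme.utils.Utils;

public class LocalizationSketch {
    public static void main(String[] args) {
        // Enumerate every LabelsBundle_<lang>.properties found next to rip.properties.
        for (String lang : Utils.getSupportedLanguages()) {
            System.out.println("available: " + lang);
        }
        Utils.setLanguage("es_ES");                        // assumed to be one of the supported codes
        System.out.println(Utils.getSelectedLanguage());   // e.g. es_ES
        System.out.println(Utils.getLocalizedString("queue.remove.all"));
    }
}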
/**
* Formats and returns the status text for rippers using the byte progress bar
*

View File

@ -2,11 +2,11 @@ Log = Log
History = History
created = created
modified = modified
Queue = Queue
queue = Queue
Configuration = Configuration
open = Open
# Keys for the Configuration menu
current.version = Current version
check.for.updates = Check for updates
auto.update = Auto-update?
@ -27,19 +27,37 @@ restore.window.position = Restore window position
remember.url.history = Remember URL history
loading.history.from = Loading history from
# Misc UI keys
# Queue keys
queue.remove.all = Remove All
queue.validation = Are you sure you want to remove all elements from the queue?
queue.remove.selected = Remove Selected
loading.history.from.configuration = Loading history from configuration
interrupted.while.waiting.to.rip.next.album = Interrupted while waiting to rip next album
inactive = Inactive
# History
re-rip.checked = Re-rip Checked
remove = Remove
clear = Clear
history.check.all = Check All
history.check.none = Check None
history.check.selected = Check Selected
history.uncheck.selected = Uncheck Selected
history.load.failed.warning = RipMe failed to load the history file at historyFile.getAbsolutePath() \n\nError: %s\n\nClosing RipMe will automatically overwrite the contents of this file,\nso you may want to back the file up before closing RipMe!
history.load.none = There are no history entries to re-rip. Rip some albums first
history.load.none.checked = No history entries have been 'Checked'. Check an entry by clicking the checkbox to the right of the URL, or right-click a URL to check/uncheck all items
# TrayIcon
tray.show = Show
tray.hide = Hide
tray.autorip = Clipboard Autorip
tray.exit = Exit
# Misc UI keys
loading.history.from.configuration = Loading history from configuration
interrupted.while.waiting.to.rip.next.album = Interrupted while waiting to rip next album
inactive = Inactive
download.url.list = Download url list
select.save.dir = Select Save Directory
# Keys for the logs generated by DownloadFileThread
nonretriable.status.code = Non-retriable status code
retriable.status.code = Retriable status code
server.doesnt.support.resuming.downloads = Server doesn't support resuming downloads
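The new queue.* keys back the clear-queue confirmation; a minimal sketch of how such a prompt might look, assuming a plain JOptionPane confirm dialog (the exact MainWindow wiring is not part of this hunk):

import javax.swing.JOptionPane;
import com.rarchives.ripme.utils.Utils;

public class ClearQueueSketch {
    // Returns true only when the user confirms removing every queued URL.
    static boolean confirmClearQueue() {
        int choice = JOptionPane.showConfirmDialog(
                null,
                Utils.getLocalizedString("queue.validation"),
                Utils.getLocalizedString("queue.remove.all"),
                JOptionPane.YES_NO_OPTION);
        return choice == JOptionPane.YES_OPTION;
    }
}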

View File

@ -2,7 +2,7 @@ Log = \u0645\u0644\u0641 \u0627\u0644\u062A\u062A\u0628\u0639
History = \u0630\u0627\u0643\u0631\u0629 \u0627\u0644\u0627\u0633\u062A\u062E\u062F\u0627\u0645
created = \u0627\u0644\u0627\u0646\u0634\u0627\u0621
modified = \u062A\u0645 \u0627\u0644\u062A\u0639\u062F\u064A\u0644
Queue = \u0637\u0627\u0628\u0648\u0631
queue = \u0637\u0627\u0628\u0648\u0631
Configuration = \u062A\u0631\u062A\u064A\u0628
# Keys for the Configuration menu

View File

@ -1,8 +1,8 @@
Log = Log
Log = Log
History = Verlauf
created = erstellt
modified = geändert
Queue = Queue
queue = Queue
Configuration = Konfiguration
# Keys for the Configuration menu

View File

@ -1,57 +1 @@
Log = Log
History = History
created = created
modified = modified
Queue = Queue
Configuration = Configuration
# Keys for the Configuration menu
current.version = Current version
check.for.updates = Check for updates
auto.update = Auto-update?
max.download.threads = Maximum download threads:
timeout.mill = Timeout (in milliseconds):
retry.download.count = Retry download count
overwrite.existing.files = Overwrite existing files?
sound.when.rip.completes = Sound when rip completes
preserve.order = Preserve order
save.logs = Save logs
notification.when.rip.starts = Notification when rip starts
save.urls.only = Save URLs only
save.album.titles = Save album titles
autorip.from.clipboard = Autorip from Clipboard
save.descriptions = Save descriptions
prefer.mp4.over.gif = Prefer MP4 over GIF
restore.window.position = Restore window position
remember.url.history = Remember URL history
loading.history.from = Loading history from
# Misc UI keys
loading.history.from.configuration = Loading history from configuration
interrupted.while.waiting.to.rip.next.album = Interrupted while waiting to rip next album
inactive = Inactive
re-rip.checked = Re-rip Checked
remove = Remove
clear = Clear
download.url.list = Download url list
select.save.dir = Select Save Directory
# Keys for the logs generated by DownloadFileThread
nonretriable.status.code = Non-retriable status code
retriable.status.code = Retriable status code
server.doesnt.support.resuming.downloads = Server doesn't support resuming downloads
# A "magic number" can also be called a file signature
was.unable.to.get.content.type.using.magic.number = Was unable to get content type using magic number
magic.number.was = Magic number was
deleting.existing.file = Deleting existing file
request.properties = Request properties
download.interrupted = Download interrupted
exceeded.maximum.retries = Exceeded maximum retries
http.status.exception = HTTP status exception
exception.while.downloading.file = Exception while downloading file
failed.to.download = Failed to download
skipping = Skipping
file.already.exists = file already exists
# This needs to be empty since EN is the default language in the bundles

View File

@ -2,7 +2,7 @@ Log = Log
History = Historia
created = creado
modified = modificado
Queue = Cola
queue = Cola
Configuration = Configuracion
# Keys for the Configuration menu
@ -27,6 +27,11 @@ restore.window.position = Restaurar posicion de ventana
remember.url.history = Recordar historia URL
loading.history.from = Cargando historia desde
# Queue keys
queue.remove.all = Eliminar todos los elementos
queue.validation = ¿Esta seguro que desea eliminar todos los elementos de la lista?
queue.remove.selected = Eliminar elementos seleccionados
# Misc UI keys
loading.history.from.configuration = Cargando historia desde la configuracion

View File

@ -2,7 +2,7 @@ Log = Logi
History = Historia
created = luotu
modified = muokattu
Queue = Jono
queue = Jono
Configuration = Asetukset
# Keys for the Configuration menu

View File

@ -2,7 +2,7 @@ Log = Journal
History = Historique
created = créé le
modified = modifié le
Queue = File d'attente
queue = File d'attente
Configuration = Configuration
# Keys for the Configuration menu

View File

@ -2,7 +2,7 @@ Log = Log
History = Riwayat
created = dibuat pada
modified = diubah pada
Queue = Antrian
queue = Antrian
Configuration = Pengaturan
# Keys for the Configuration menu

View File

@ -2,7 +2,7 @@ Log = Log
History = Cronologia
created = creato
modified = modificato
Queue = Coda
queue = Coda
Configuration = Configurazione
# Keys for the Configuration menu
@ -44,14 +44,14 @@ nonretriable.status.code = Codice di stato irreversibile
retriable.status.code = Codice di stato reversibile
server.doesnt.support.resuming.downloads = Il server non supporta la ripresa dei download
# A "magic number" can also be called a file signature
was.unable.to.get.content.type.using.magic.number = Non è stato possibile ottenere il tipo del contenuto usando magic number
was.unable.to.get.content.type.using.magic.number = Non è stato possibile ottenere il tipo del contenuto usando magic number
magic.number.was = Magic number era
deleting.existing.file = Cancellazione file esistente
request.properties = Richiesta proprietà
request.properties = Richiesta proprietà
download.interrupted = Download interrotto
exceeded.maximum.retries = Superato il numero massimo di tentativi
http.status.exception = Eccezione stato HTTP
exception.while.downloading.file = Eccezione durante il download del file
failed.to.download = Download non riuscito
skipping = Saltare
file.already.exists = il file esiste già
file.already.exists = il file esiste già

View File

@ -2,7 +2,7 @@ Log = \uB85C\uADF8
History = \uD788\uC2A4\uD1A0\uB9AC
created = \uC0DD\uC0B0\uB428
modified = \uC218\uC815\uB428
Queue = \uB300\uAE30\uC5F4
queue = \uB300\uAE30\uC5F4
Configuration = \uAD6C\uC131
# Keys for the Configuration menu

View File

@ -2,7 +2,7 @@ Log = Logboek
History = Geschiedenis
created = gemaakt
modified = aangepast
Queue = Wachtrij
queue = Wachtrij
Configuration = Configuratie
# Keys for the Configuration menu

View File

@ -1,8 +1,8 @@
Log = Logi
Log = Logi
History = Historia
created = Stworzono
modified = Zmodyfikowano
Queue = Kolejka
queue = Kolejka
Configuration = Konfiguracja
# Keys for the Configuration menu

View File

@ -2,7 +2,7 @@ Log = Loki
History = Historriijja
created = luatu
modified = muakat
Queue = Jono
queue = Jono
Configuration = Assetuksse
# Keys for the Configuration menu

View File

@ -2,7 +2,7 @@ Log = Registro
History = Histórico
created = criado
modified = modificado
Queue = Fila
queue = Fila
Configuration = Configuração
# Keys for the Configuration menu

View File

@ -2,7 +2,7 @@ Log = Registo
History = Histórico
created = criado
modified = modificado
Queue = Fila
queue = Fila
Configuration = Configuração
# Keys for the Configuration menu

View File

@ -1,8 +1,8 @@
Log = Лог
Log = Лог
History = История
created = создано
modified = изменено
Queue = Очередь
queue = Очередь
Configuration = Настройки
# Keys for the Configuration menu

View File

@ -0,0 +1,13 @@
package com.rarchives.ripme.tst.ripper.rippers;
import java.io.IOException;
import java.net.URL;
import com.rarchives.ripme.ripper.rippers.MyreadingmangaRipper;
public class MyreadingmangaRipperTest extends RippersTest {
public void testRip() throws IOException {
MyreadingmangaRipper ripper = new MyreadingmangaRipper(new URL("https://myreadingmanga.info/zelo-lee-brave-lover-dj-slave-market-jp/"));
testRipper(ripper);
}
}

View File

@ -0,0 +1,61 @@
package com.rarchives.ripme.tst.ui;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.ResourceBundle;
import java.util.Set;
import com.rarchives.ripme.utils.Utils;
import org.apache.log4j.Logger;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
public class LabelsBundlesTest {
private Logger logger = Logger.getLogger(Utils.class);
private static final String DEFAULT_LANG = "en_US";
@Test
void testKeyCount() {
ResourceBundle defaultBundle = Utils.getResourceBundle(null);
HashMap<String, ArrayList<String>> dictionary = new HashMap<>();
for (String lang : Utils.getSupportedLanguages()) {
ResourceBundle.clearCache();
if (lang.equals(DEFAULT_LANG))
continue;
ResourceBundle selectedLang = Utils.getResourceBundle(lang);
for (final Enumeration<String> keys = defaultBundle.getKeys(); keys.hasMoreElements();) {
String element = keys.nextElement();
if (selectedLang.containsKey(element)
&& !selectedLang.getString(element).equals(defaultBundle.getString(element))) {
if (dictionary.get(lang) == null)
dictionary.put(lang, new ArrayList<>());
dictionary.get(lang).add(element);
}
}
}
dictionary.keySet().forEach(d -> {
logger.warn(String.format("Keys missing in %s", d));
dictionary.get(d).forEach(v -> logger.warn(v));
logger.warn("\n");
});
}
@Test
void testKeyName() {
ResourceBundle defaultBundle = Utils.getResourceBundle(null);
Set<String> defaultSet = defaultBundle.keySet();
for (String lang : Utils.getSupportedLanguages()) {
if (lang.equals(DEFAULT_LANG))
continue;
for (String key : Utils.getResourceBundle(lang).keySet()) {
assertTrue(defaultSet.contains(key),
String.format("The key %s of %s is not in the default bundle", key, lang));
}
}
}
}
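Utils.getResourceBundle(String) is exercised by the test but its body is not shown in this diff; a minimal sketch of what such a lookup typically does, assuming the bundles sit at the classpath root under the base name LabelsBundle:

import java.util.Locale;
import java.util.ResourceBundle;

public class ResourceBundleSketch {
    // Sketch only; the real Utils.getResourceBundle may differ.
    static ResourceBundle getResourceBundle(String langSelect) {
        if (langSelect == null) {
            // Fall back to the default bundle (LabelsBundle.properties / system locale).
            return ResourceBundle.getBundle("LabelsBundle", Locale.getDefault());
        }
        String[] parts = langSelect.split("_");            // e.g. "en_US" -> ["en", "US"]
        Locale locale = parts.length > 1 ? new Locale(parts[0], parts[1]) : new Locale(parts[0]);
        return ResourceBundle.getBundle("LabelsBundle", locale);
    }

    public static void main(String[] args) {
        System.out.println(getResourceBundle("en_US").getString("queue"));  // Queue
    }
}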

16
workspace.code-workspace Normal file
View File

@ -0,0 +1,16 @@
{
"folders": [
{
"path": "E:\\Downloads\\_Isaaku\\dev"
}
],
"settings": {
"files.exclude": {
"**/.classpath": false,
"**/.project": true,
"**/.settings": true,
"**/.factorypath": true
},
"java.configuration.updateBuildConfiguration": "automatic"
}
}