1
0
mirror of https://github.com/RipMeApp/ripme.git synced 2025-01-18 05:08:15 +01:00

Merge branch 'ripme-master' into issues/twitter_ripper

This commit is contained in:
Isaaku 2019-10-22 11:50:04 -05:00
commit 419c5f4f2f
16 changed files with 378 additions and 6 deletions

View File

@ -3,7 +3,7 @@
<groupId>com.rarchives.ripme</groupId>
<artifactId>ripme</artifactId>
<packaging>jar</packaging>
<version>1.7.87</version>
<version>1.7.88</version>
<name>ripme</name>
<url>http://rip.rarchives.com</url>
<properties>

View File

@ -1,6 +1,7 @@
{
"currentHash": "52dfb707d6247f44949c0d97b19c7815dc848b26837b98ae561c0dea20993a12",
"currentHash": "3c32b3b41b60fccbfa3427889dda583964332d44ebd7478755d02257e294454d",
"changeList": [
"1.7.88: Added ripper for Myreadingmanga.info; Added Mastodon rippers; Fix queue count update when queue is 0; Added ripper for listal; Now downloads best video when ripping twitter",
"1.7.87: Added ripper for allporncomic.com; Fixed Xhamster ripper; Added support xhamster2.com and xhamster.desi; Fixes for gfycat thumbs urls",
"1.7.86: Added Meituri Ripper; fixed -u flag; Fixed pornhub ripper; Xhamster ripper can now queue users videos",
"1.7.85: Fixed instagram ripper; Flickr ripper now downloads largest image",
@ -259,5 +260,5 @@
"1.0.3: Added VK.com ripper",
"1.0.1: Added auto-update functionality"
],
"latestVersion": "1.7.87"
"latestVersion": "1.7.88"
}

View File

@ -0,0 +1,20 @@
package com.rarchives.ripme.ripper.rippers;
import java.io.IOException;
import java.net.URL;
/**
 * Ripper for artalley.social, a Mastodon instance.
 * All scraping logic is inherited from {@link MastodonRipper}; this class
 * only supplies the instance-specific host and domain strings.
 */
public class ArtAlleyRipper extends MastodonRipper {
    public ArtAlleyRipper(URL url) throws IOException {
        super(url);
    }

    /** Instance domain used to match and build page URLs. */
    @Override
    public String getDomain() {
        return "artalley.social";
    }

    /** Short host name used for the download folder prefix. */
    @Override
    public String getHost() {
        return "artalley";
    }
}

View File

@ -0,0 +1,20 @@
package com.rarchives.ripme.ripper.rippers;
import java.io.IOException;
import java.net.URL;
/**
 * Ripper for baraag.net, a Mastodon instance.
 * All scraping logic is inherited from {@link MastodonRipper}; this class
 * only supplies the instance-specific host and domain strings.
 */
public class BaraagRipper extends MastodonRipper {
    public BaraagRipper(URL url) throws IOException {
        super(url);
    }

    /** Instance domain used to match and build page URLs. */
    @Override
    public String getDomain() {
        return "baraag.net";
    }

    /** Short host name used for the download folder prefix. */
    @Override
    public String getHost() {
        return "baraag";
    }
}

View File

@ -0,0 +1,91 @@
package com.rarchives.ripme.ripper.rippers;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.jsoup.Connection.Response;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import com.rarchives.ripme.ripper.AbstractHTMLRipper;
import com.rarchives.ripme.utils.Http;
/**
*
* @author randomcommitter
*/
/**
 * Ripper for single video pages on erotiv.io
 * (e.g. https://erotiv.io/e/1568314255).
 *
 * @author randomcommitter
 */
public class ErotivRipper extends AbstractHTMLRipper {
    // Compiled once: dots escaped (the originals matched any character) and the
    // id made mandatory — "[0-9]*" accepted https://erotiv.io/e/ with no id at all.
    private static final Pattern GID_PATTERN =
            Pattern.compile("^https?://(?:www\\.)?erotiv\\.io/e/([0-9]+)/?$");
    // Relative source path of an uploaded video, e.g. /uploads/1568314255.mp4.
    // Hoisted out of the getURLsFromPage loop so it is not recompiled per element.
    private static final Pattern VIDEO_SRC_PATTERN =
            Pattern.compile("/uploads/[0-9]+\\.mp4");

    // NOTE(review): never read or written anywhere in this class — candidate for removal.
    boolean rippingProfile;

    public ErotivRipper(URL url) throws IOException {
        super(url);
    }

    /** Instance domain used to match URLs. */
    @Override
    public String getDomain() {
        return "erotiv.io";
    }

    /** Short host name used for the download folder prefix. */
    @Override
    public String getHost() {
        return "erotiv";
    }

    /**
     * Extracts the numeric video id from the URL.
     *
     * @throws MalformedURLException if the URL is not of the form
     *         https://erotiv.io/e/id
     */
    @Override
    public String getGID(URL url) throws MalformedURLException {
        Matcher m = GID_PATTERN.matcher(url.toExternalForm());
        if (m.matches()) {
            return m.group(1);
        }
        throw new MalformedURLException("erotiv video not found in " + url + ", expected https://erotiv.io/e/id");
    }

    /** Fetches the video page; ignoreContentType because the server may not send text/html. */
    @Override
    public Document getFirstPage() throws IOException {
        Response resp = Http.url(this.url).ignoreContentType().response();
        return resp.parse();
    }

    /** Normalizes www.erotiv.io URLs to the canonical https://erotiv.io form. */
    @Override
    public URL sanitizeURL(URL url) throws MalformedURLException {
        return new URL(url.toExternalForm().replaceAll("https?://www\\.erotiv\\.io", "https://erotiv.io"));
    }

    /** Collects the absolute URLs of all mp4 sources in the page's video element. */
    @Override
    public List<String> getURLsFromPage(Document doc) {
        List<String> results = new ArrayList<>();
        for (Element el : doc.select("video[id=\"video-id\"] > source")) {
            if (el.hasAttr("src")) {
                Matcher m = VIDEO_SRC_PATTERN.matcher(el.attr("src"));
                if (m.matches()) {
                    results.add("https://erotiv.io" + el.attr("src"));
                }
            }
        }
        return results;
    }

    @Override
    public void downloadURL(URL url, int index) {
        addURLToDownload(url, getPrefix(index));
    }

    /** This ripper supports being placed in the download queue. */
    @Override
    public boolean hasQueueSupport() {
        return true;
    }
}

View File

@ -0,0 +1,96 @@
package com.rarchives.ripme.ripper.rippers;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.json.JSONObject;
import org.json.JSONArray;
import com.rarchives.ripme.ripper.AbstractHTMLRipper;
import com.rarchives.ripme.utils.Http;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
/**
 * Base ripper for Mastodon instances. Rips all media from a user's
 * /@username/media page, following "load more" pagination. Instance-specific
 * subclasses override {@link #getHost()} and {@link #getDomain()}.
 */
public class MastodonRipper extends AbstractHTMLRipper {
    // Maps each media URL to its Mastodon media id, recorded while paging so
    // downloadURL can prefix files with a stable id. Synchronized because
    // downloads run on worker threads.
    private Map<String, String> itemIDs = Collections.synchronizedMap(new HashMap<>());

    public MastodonRipper(URL url) throws IOException {
        super(url);
    }

    /** Short host name used for the download folder prefix. */
    @Override
    public String getHost() {
        return "mastodon";
    }

    /** Instance domain used to match and build page URLs. */
    @Override
    public String getDomain() {
        return "mastodon.social";
    }

    /**
     * Builds a GID of the form "domain@username" from a profile URL.
     *
     * @throws MalformedURLException if the URL is not a /@username[/media] page
     *         on this instance's domain
     */
    @Override
    public String getGID(URL url) throws MalformedURLException {
        Pattern p = Pattern.compile("^https?://(" + getDomain() + ")/@([a-zA-Z0-9_-]+)(/media/?)?$");
        Matcher m = p.matcher(url.toExternalForm());
        if (m.matches()) {
            // Return the text contained between () in the regex
            return m.group(1) + "@" + m.group(2);
        }
        throw new MalformedURLException(
                "Expected " + getDomain() + " URL format: " +
                getDomain() + "/@username - got " + url + " instead");
    }

    /** Fetches the user's media page, appending "/media" if the URL lacks it. */
    @Override
    public Document getFirstPage() throws IOException {
        Pattern p = Pattern.compile("^/@[a-zA-Z0-9_-]+/media/?$");
        Matcher m = p.matcher(url.getPath());
        if (m.matches()) {
            return Http.url(url).get();
        }
        return Http.url(url.toExternalForm().replaceAll("/$", "") + "/media").get();
    }

    /**
     * Follows the "load more" pagination link.
     *
     * @throws IOException when no further page link exists (end of pagination)
     */
    @Override
    public Document getNextPage(Document doc) throws IOException {
        Elements hrefs = doc.select(".h-entry + .entry > a.load-more.load-gap");
        if (hrefs.isEmpty()) {
            throw new IOException("No more pages");
        }
        String nextUrl = hrefs.last().attr("href");
        // Brief pause between page fetches to avoid hammering the instance.
        sleep(500);
        return Http.url(nextUrl).get();
    }

    /**
     * Extracts media URLs from the page's MediaGallery components, recording
     * each URL's media id for use as a filename prefix.
     */
    @Override
    public List<String> getURLsFromPage(Document doc) {
        List<String> result = new ArrayList<>();
        for (Element el : doc.select("[data-component=\"MediaGallery\"]")) {
            String props = el.attr("data-props");
            JSONObject obj = new JSONObject(props);
            JSONArray arr = obj.getJSONArray("media");
            for (int i = 0; i < arr.length(); i++) {
                String url = arr.getJSONObject(i).getString("url");
                result.add(url);
                String id = arr.getJSONObject(i).getString("id");
                itemIDs.put(url, id);
            }
        }
        return result;
    }

    @Override
    public void downloadURL(URL url, int index) {
        // Guard against a missing id: the original produced a literal "null_"
        // filename prefix when the URL was absent from itemIDs.
        String id = itemIDs.get(url.toString());
        addURLToDownload(url, id != null ? id + "_" : getPrefix(index));
    }
}

View File

@ -0,0 +1,20 @@
package com.rarchives.ripme.ripper.rippers;
import java.io.IOException;
import java.net.URL;
/**
 * Ripper for mastodon.xyz, a Mastodon instance.
 * All scraping logic is inherited from {@link MastodonRipper}; this class
 * only supplies the instance-specific host and domain strings.
 */
public class MastodonXyzRipper extends MastodonRipper {
    public MastodonXyzRipper(URL url) throws IOException {
        super(url);
    }

    /** Instance domain used to match and build page URLs. */
    @Override
    public String getDomain() {
        return "mastodon.xyz";
    }

    /** Short host name used for the download folder prefix. */
    @Override
    public String getHost() {
        return "mastodonxyz";
    }
}

View File

@ -0,0 +1,20 @@
package com.rarchives.ripme.ripper.rippers;
import java.io.IOException;
import java.net.URL;
/**
 * Ripper for pawoo.net, a Mastodon instance.
 * All scraping logic is inherited from {@link MastodonRipper}; this class
 * only supplies the instance-specific host and domain strings.
 */
public class PawooRipper extends MastodonRipper {
    public PawooRipper(URL url) throws IOException {
        super(url);
    }

    /** Instance domain used to match and build page URLs. */
    @Override
    public String getDomain() {
        return "pawoo.net";
    }

    /** Short host name used for the download folder prefix. */
    @Override
    public String getHost() {
        return "pawoo";
    }
}

View File

@ -23,7 +23,7 @@ import com.rarchives.ripme.utils.Utils;
public class UpdateUtils {
private static final Logger logger = Logger.getLogger(UpdateUtils.class);
private static final String DEFAULT_VERSION = "1.7.87";
private static final String DEFAULT_VERSION = "1.7.88";
private static final String REPO_NAME = "ripmeapp/ripme";
private static final String updateJsonURL = "https://raw.githubusercontent.com/" + REPO_NAME + "/master/ripme.json";
private static String mainFileName;

View File

@ -11,7 +11,7 @@ current.version = Versão atual
check.for.updates = Verificar atualizações
auto.update = Atualização automática?
max.download.threads = Número máximo de processos de transferência
timeout.mill = Timeout (em milissegundos):
timeout.mill = Tempo de espera (em milissegundos):
retry.download.count = Número de novas tentativas de transferência
overwrite.existing.files = Sobrescrever ficheiros existentes?
sound.when.rip.completes = Notificar quando o rip é concluído
@ -54,4 +54,4 @@ http.status.exception = HTTP status exception
exception.while.downloading.file = Exception while downloading file
failed.to.download = Failed to download
skipping = Skipping
file.already.exists = file already exists
file.already.exists = file already exists

View File

@ -0,0 +1,15 @@
package com.rarchives.ripme.tst.ripper.rippers;
import java.io.IOException;
import java.net.URL;
import com.rarchives.ripme.ripper.rippers.ArtAlleyRipper;
import org.junit.jupiter.api.Test;
public class ArtAlleyRipperTest extends RippersTest {
    /** Smoke test: rip the media page of a known artalley.social account. */
    @Test
    public void testRip() throws IOException {
        URL mediaUrl = new URL("https://artalley.social/@curator/media");
        testRipper(new ArtAlleyRipper(mediaUrl));
    }
}

View File

@ -0,0 +1,15 @@
package com.rarchives.ripme.tst.ripper.rippers;
import java.io.IOException;
import java.net.URL;
import com.rarchives.ripme.ripper.rippers.BaraagRipper;
import org.junit.jupiter.api.Test;
public class BaraagRipperTest extends RippersTest {
    /** Smoke test: rip the media page of a known baraag.net account. */
    @Test
    public void testRip() throws IOException {
        URL mediaUrl = new URL("https://baraag.net/@darkshadow777/media");
        testRipper(new BaraagRipper(mediaUrl));
    }
}

View File

@ -0,0 +1,29 @@
package com.rarchives.ripme.tst.ripper.rippers;
import java.io.IOException;
import java.net.URL;
import com.rarchives.ripme.ripper.rippers.ErotivRipper;
import org.junit.jupiter.api.Test;
public class ErotivRipperTest extends RippersTest {
@Test
public void testGetGID() throws IOException {
URL url = new URL("https://erotiv.io/e/1568314255");
ErotivRipper ripper = new ErotivRipper(url);
assert("1568314255".equals(ripper.getGID(url)));
}
public void testRip() throws IOException {
URL url = new URL("https://erotiv.io/e/1568314255");
ErotivRipper ripper = new ErotivRipper(url);
testRipper(ripper);
}
@Test
public void testGetURLsFromPage() throws IOException {
URL url = new URL("https://erotiv.io/e/1568314255");
ErotivRipper ripper = new ErotivRipper(url);
assert(1 == ripper.getURLsFromPage(ripper.getFirstPage()).size());
}
}

View File

@ -0,0 +1,15 @@
package com.rarchives.ripme.tst.ripper.rippers;
import java.io.IOException;
import java.net.URL;
import com.rarchives.ripme.ripper.rippers.MastodonRipper;
import org.junit.jupiter.api.Test;
public class MastodonRipperTest extends RippersTest {
    /** Smoke test: rip the media page of a known mastodon.social account. */
    @Test
    public void testRip() throws IOException {
        URL mediaUrl = new URL("https://mastodon.social/@pythonhub/media");
        testRipper(new MastodonRipper(mediaUrl));
    }
}

View File

@ -0,0 +1,15 @@
package com.rarchives.ripme.tst.ripper.rippers;
import java.io.IOException;
import java.net.URL;
import com.rarchives.ripme.ripper.rippers.MastodonXyzRipper;
import org.junit.jupiter.api.Test;
public class MastodonXyzRipperTest extends RippersTest {
    /** Smoke test: rip the media page of a known mastodon.xyz account. */
    @Test
    public void testRip() throws IOException {
        URL mediaUrl = new URL("https://mastodon.xyz/@artwo/media");
        testRipper(new MastodonXyzRipper(mediaUrl));
    }
}

View File

@ -0,0 +1,15 @@
package com.rarchives.ripme.tst.ripper.rippers;
import java.io.IOException;
import java.net.URL;
import com.rarchives.ripme.ripper.rippers.PawooRipper;
import org.junit.jupiter.api.Test;
public class PawooRipperTest extends RippersTest {
    /** Smoke test: rip the media page of a known pawoo.net account. */
    @Test
    public void testRip() throws IOException {
        URL mediaUrl = new URL("https://pawoo.net/@halki/media");
        testRipper(new PawooRipper(mediaUrl));
    }
}