Skip to content

Commit

Permalink
Force each class to use its own Logger instance for better logs (Fixes …) <!-- NOTE(review): commit title truncated in page capture; issue number not visible -->
Browse files Browse the repository at this point in the history
  • Loading branch information
metaprime committed Jan 7, 2025
1 parent a21303f commit d4ca148
Show file tree
Hide file tree
Showing 79 changed files with 1,167 additions and 816 deletions.
8 changes: 4 additions & 4 deletions src/main/java/com/rarchives/ripme/App.java
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@
*/
public class App {

public static final Logger logger = LogManager.getLogger(App.class);
private static final Logger logger = LogManager.getLogger(App.class);
public static String stringToAppendToFoldername = null;
private static final History HISTORY = new History();

Expand Down Expand Up @@ -140,7 +140,7 @@ private static void handleArguments(String[] args) throws IOException {
Utils.setConfigString("history.location", historyLocation);
logger.info("Set history file to " + historyLocation);
}

//Allow file overwriting
if (cl.hasOption('w')) {
Utils.setConfigBoolean("file.overwrite", true);
Expand Down Expand Up @@ -173,7 +173,7 @@ private static void handleArguments(String[] args) throws IOException {
// change the default rips directory
Utils.setConfigString("rips.directory", cl.getOptionValue('l'));
}

//Re-rip <i>all</i> previous albums
if (cl.hasOption('r')) {
// Re-rip all via command-line
Expand Down Expand Up @@ -386,7 +386,7 @@ private static void loadHistory() throws IOException {
}
}

/*
/*
* @see MainWindow.saveHistory
*/
private static void saveHistory() {
Expand Down
59 changes: 33 additions & 26 deletions src/main/java/com/rarchives/ripme/ripper/AbstractHTMLRipper.java
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,10 @@
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.*;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
Expand All @@ -16,19 +19,23 @@
import java.util.List;
import java.util.Map;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jsoup.nodes.Document;

import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Utils;
import com.rarchives.ripme.ui.MainWindow;
import com.rarchives.ripme.ui.RipStatusMessage;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Http;
import com.rarchives.ripme.utils.Utils;

/**
* Simplified ripper, designed for ripping from sites by parsing HTML.
*/
public abstract class AbstractHTMLRipper extends AbstractRipper {

private static final Logger logger = LogManager.getLogger(AbstractHTMLRipper.class);

private final Map<URL, File> itemsPending = Collections.synchronizedMap(new HashMap<>());
private final Map<URL, Path> itemsCompleted = Collections.synchronizedMap(new HashMap<>());
private final Map<URL, String> itemsErrored = Collections.synchronizedMap(new HashMap<>());
Expand Down Expand Up @@ -116,7 +123,7 @@ protected boolean pageContainsAlbums(URL url) {
public void rip() throws IOException, URISyntaxException {
int index = 0;
int textindex = 0;
LOGGER.info("Retrieving " + this.url);
logger.info("Retrieving " + this.url);
sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
var doc = getCachedFirstPage();

Expand All @@ -128,20 +135,20 @@ public void rip() throws IOException, URISyntaxException {

// We set doc to null here so the while loop below this doesn't fire
doc = null;
LOGGER.debug("Adding items from " + this.url + " to queue");
logger.debug("Adding items from " + this.url + " to queue");
}

List<String> doclocation = new ArrayList<>();

LOGGER.info("Got doc location " + doc.location());
logger.info("Got doc location " + doc.location());

while (doc != null) {

LOGGER.info("Processing a doc...");
logger.info("Processing a doc...");

// catch if we saw a doc location already, save the ones seen in a list
if (doclocation.contains(doc.location())) {
LOGGER.info("Already processed location " + doc.location() + " breaking");
logger.info("Already processed location " + doc.location() + " breaking");
break;
}
doclocation.add(doc.location());
Expand All @@ -151,7 +158,7 @@ public void rip() throws IOException, URISyntaxException {
break;
}

LOGGER.info("retrieving urls from doc");
logger.info("retrieving urls from doc");

List<String> imageURLs = getURLsFromPage(doc);
// If hasASAPRipping() returns true then the ripper will handle downloading the files
Expand All @@ -170,25 +177,25 @@ public void rip() throws IOException, URISyntaxException {

for (String imageURL : imageURLs) {
index += 1;
LOGGER.debug("Found image url #" + index + ": '" + imageURL + "'");
logger.debug("Found image url #" + index + ": '" + imageURL + "'");
downloadURL(new URI(imageURL).toURL(), index);
if (isStopped() || isThisATest()) {
break;
}
}
}
if (hasDescriptionSupport() && Utils.getConfigBoolean("descriptions.save", false)) {
LOGGER.debug("Fetching description(s) from " + doc.location());
logger.debug("Fetching description(s) from " + doc.location());
List<String> textURLs = getDescriptionsFromPage(doc);
if (!textURLs.isEmpty()) {
LOGGER.debug("Found description link(s) from " + doc.location());
logger.debug("Found description link(s) from " + doc.location());
for (String textURL : textURLs) {
if (isStopped() || isThisATest()) {
break;
}

textindex += 1;
LOGGER.debug("Getting description from " + textURL);
logger.debug("Getting description from " + textURL);
String[] tempDesc = getDescription(textURL,doc);

if (tempDesc != null) {
Expand All @@ -204,11 +211,11 @@ public void rip() throws IOException, URISyntaxException {
+ ".txt").exists();

if (Utils.getConfigBoolean("file.overwrite", false) || !fileExists) {
LOGGER.debug("Got description from " + textURL);
logger.debug("Got description from " + textURL);
saveText(url, "", tempDesc[0], textindex, (tempDesc.length > 1 ? tempDesc[1] : filename));
sleep(descSleepTime());
} else {
LOGGER.debug("Description from " + textURL + " already exists.");
logger.debug("Description from " + textURL + " already exists.");
}
}

Expand All @@ -224,14 +231,14 @@ public void rip() throws IOException, URISyntaxException {
sendUpdate(STATUS.LOADING_RESOURCE, "next page");
doc = getNextPage(doc);
} catch (IOException e) {
LOGGER.info("Can't get next page: " + e.getMessage());
logger.info("Can't get next page: " + e.getMessage());
break;
}
}

// If they're using a thread pool, wait for it.
if (getThreadPool() != null) {
LOGGER.debug("Waiting for threadpool " + getThreadPool().getClass().getName());
logger.debug("Waiting for threadpool " + getThreadPool().getClass().getName());
getThreadPool().waitForThreads();
}
waitForThreads();
Expand Down Expand Up @@ -296,12 +303,12 @@ private boolean saveText(URL url, String subdirectory, String text, int index, S
out.write(text.getBytes());
out.close();
} catch (IOException e) {
LOGGER.error("[!] Error creating save file path for description '" + url + "':", e);
logger.error("[!] Error creating save file path for description '" + url + "':", e);
return false;
}
LOGGER.debug("Downloading " + url + "'s description to " + saveFileAs);
logger.debug("Downloading " + url + "'s description to " + saveFileAs);
if (!saveFileAs.getParentFile().exists()) {
LOGGER.info("[+] Creating directory: " + saveFileAs.getParent());
logger.info("[+] Creating directory: " + saveFileAs.getParent());
saveFileAs.getParentFile().mkdirs();
}
return true;
Expand Down Expand Up @@ -355,7 +362,7 @@ public boolean addURLToDownload(URL url, Path saveAs, String referrer, Map<Strin
|| itemsCompleted.containsKey(url)
|| itemsErrored.containsKey(url) )) {
// Item is already downloaded/downloading, skip it.
LOGGER.info("[!] Skipping " + url + " -- already attempted: " + Utils.removeCWD(saveAs));
logger.info("[!] Skipping " + url + " -- already attempted: " + Utils.removeCWD(saveAs));
return false;
}
if (shouldIgnoreURL(url)) {
Expand All @@ -370,7 +377,7 @@ public boolean addURLToDownload(URL url, Path saveAs, String referrer, Map<Strin
Files.write(urlFile, text.getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE, StandardOpenOption.APPEND);
itemsCompleted.put(url, urlFile);
} catch (IOException e) {
LOGGER.error("Error while writing to " + urlFile, e);
logger.error("Error while writing to " + urlFile, e);
}
}
else {
Expand Down Expand Up @@ -423,7 +430,7 @@ public void downloadCompleted(URL url, Path saveAs) {

checkIfComplete();
} catch (Exception e) {
LOGGER.error("Exception while updating observer: ", e);
logger.error("Exception while updating observer: ", e);
}
}

Expand Down Expand Up @@ -486,20 +493,20 @@ public void setWorkingDir(URL url) throws IOException, URISyntaxException {
path += File.separator;
}
String title = getAlbumTitle(this.url);
LOGGER.debug("Using album title '" + title + "'");
logger.debug("Using album title '" + title + "'");

title = Utils.filesystemSafe(title);
path += title;
path = Utils.getOriginalDirectory(path) + File.separator; // check for case sensitive (unix only)

this.workingDir = new File(path);
if (!this.workingDir.exists()) {
LOGGER.info("[+] Creating directory: " + Utils.removeCWD(this.workingDir.toPath()));
logger.info("[+] Creating directory: " + Utils.removeCWD(this.workingDir.toPath()));
if (!this.workingDir.mkdirs()) {
throw new IOException("Failed creating dir: \"" + this.workingDir + "\"");
}
}
LOGGER.debug("Set working directory to: " + this.workingDir);
logger.debug("Set working directory to: " + this.workingDir);
}

/**
Expand Down
49 changes: 27 additions & 22 deletions src/main/java/com/rarchives/ripme/ripper/AbstractJSONRipper.java
Original file line number Diff line number Diff line change
@@ -1,10 +1,5 @@
package com.rarchives.ripme.ripper;

import com.rarchives.ripme.ui.RipStatusMessage;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Utils;
import org.json.JSONObject;

import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
Expand All @@ -21,11 +16,21 @@
import java.util.List;
import java.util.Map;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.json.JSONObject;

import com.rarchives.ripme.ui.RipStatusMessage;
import com.rarchives.ripme.ui.RipStatusMessage.STATUS;
import com.rarchives.ripme.utils.Utils;

/**
* Simplified ripper, designed for ripping from sites by parsing JSON.
*/
public abstract class AbstractJSONRipper extends AbstractRipper {


private static final Logger logger = LogManager.getLogger(AbstractJSONRipper.class);

private Map<URL, File> itemsPending = Collections.synchronizedMap(new HashMap<URL, File>());
private Map<URL, Path> itemsCompleted = Collections.synchronizedMap(new HashMap<URL, Path>());
private Map<URL, String> itemsErrored = Collections.synchronizedMap(new HashMap<URL, String>());
Expand Down Expand Up @@ -65,18 +70,18 @@ public URL sanitizeURL(URL url) throws MalformedURLException, URISyntaxException
@Override
public void rip() throws IOException, URISyntaxException {
int index = 0;
LOGGER.info("Retrieving " + this.url);
logger.info("Retrieving " + this.url);
sendUpdate(STATUS.LOADING_RESOURCE, this.url.toExternalForm());
JSONObject json = getFirstPage();

while (json != null) {
List<String> imageURLs = getURLsFromJSON(json);

if (alreadyDownloadedUrls >= Utils.getConfigInteger("history.end_rip_after_already_seen", 1000000000) && !isThisATest()) {
sendUpdate(STATUS.DOWNLOAD_COMPLETE, "Already seen the last " + alreadyDownloadedUrls + " images ending rip");
break;
}

// Remove all but 1 image
if (isThisATest()) {
while (imageURLs.size() > 1) {
Expand All @@ -92,9 +97,9 @@ public void rip() throws IOException, URISyntaxException {
if (isStopped()) {
break;
}

index += 1;
LOGGER.debug("Found image url #" + index+ ": " + imageURL);
logger.debug("Found image url #" + index+ ": " + imageURL);
downloadURL(new URI(imageURL).toURL(), index);
}

Expand All @@ -106,14 +111,14 @@ public void rip() throws IOException, URISyntaxException {
sendUpdate(STATUS.LOADING_RESOURCE, "next page");
json = getNextPage(json);
} catch (IOException | URISyntaxException e) {
LOGGER.info("Can't get next page: " + e.getMessage());
logger.info("Can't get next page: " + e.getMessage());
break;
}
}

// If they're using a thread pool, wait for it.
if (getThreadPool() != null) {
LOGGER.debug("Waiting for threadpool " + getThreadPool().getClass().getName());
logger.debug("Waiting for threadpool " + getThreadPool().getClass().getName());
getThreadPool().waitForThreads();
}
waitForThreads();
Expand All @@ -126,11 +131,11 @@ protected String getPrefix(int index) {
}
return prefix;
}

/*
* ------ Methods copied from AlbumRipper ------
*/

protected boolean allowDuplicates() {
return false;
}
Expand Down Expand Up @@ -159,7 +164,7 @@ public boolean addURLToDownload(URL url, Path saveAs, String referrer, Map<Strin
|| itemsCompleted.containsKey(url)
|| itemsErrored.containsKey(url) )) {
// Item is already downloaded/downloading, skip it.
LOGGER.info("[!] Skipping " + url + " -- already attempted: " + Utils.removeCWD(saveAs));
logger.info("[!] Skipping " + url + " -- already attempted: " + Utils.removeCWD(saveAs));
return false;
}
if (shouldIgnoreURL(url)) {
Expand All @@ -174,7 +179,7 @@ public boolean addURLToDownload(URL url, Path saveAs, String referrer, Map<Strin
Files.write(urlFile, text.getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE, StandardOpenOption.APPEND);
itemsCompleted.put(url, urlFile);
} catch (IOException e) {
LOGGER.error("Error while writing to " + urlFile, e);
logger.error("Error while writing to " + urlFile, e);
}
}
else {
Expand Down Expand Up @@ -227,7 +232,7 @@ public void downloadCompleted(URL url, Path saveAs) {

checkIfComplete();
} catch (Exception e) {
LOGGER.error("Exception while updating observer: ", e);
logger.error("Exception while updating observer: ", e);
}
}

Expand Down Expand Up @@ -292,16 +297,16 @@ public void setWorkingDir(URL url) throws IOException, URISyntaxException {
} else {
title = super.getAlbumTitle(this.url);
}
LOGGER.debug("Using album title '" + title + "'");
logger.debug("Using album title '" + title + "'");

title = Utils.filesystemSafe(title);
wd = wd.resolve(title);
if (!Files.exists(wd)) {
LOGGER.info("[+] Creating directory: " + Utils.removeCWD(wd));
logger.info("[+] Creating directory: " + Utils.removeCWD(wd));
Files.createDirectory(wd);
}
this.workingDir = wd.toFile();
LOGGER.info("Set working directory to: {}", this.workingDir);
logger.info("Set working directory to: {}", this.workingDir);
}

/**
Expand Down Expand Up @@ -329,5 +334,5 @@ public String getStatusText() {
return sb.toString();
}


}
Loading

0 comments on commit d4ca148

Please sign in to comment.