(crawler) Improve logging and error handling

Viktor Lofgren 2025-01-21 21:44:21 +01:00
parent 274941f6de
commit 8862100f7e
2 changed files with 1 additions and 3 deletions


@@ -23,12 +23,10 @@ import org.slf4j.LoggerFactory;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.URISyntaxException;
-import java.net.URLDecoder;
 import java.net.http.HttpClient;
 import java.net.http.HttpRequest;
 import java.net.http.HttpResponse;
 import java.net.http.HttpTimeoutException;
-import java.nio.charset.StandardCharsets;
 import java.time.Duration;
 import java.util.*;
 import java.util.concurrent.Executors;


@@ -96,7 +96,7 @@ public class WarcRecorder implements AutoCloseable {
         try {
             response = client.send(request, java.net.http.HttpResponse.BodyHandlers.ofInputStream());
         }
-        catch (IOException ex) {
+        catch (Exception ex) {
             logger.warn("Failed to fetch URL {}: {}", requestUri, ex.getMessage());
             return new HttpFetchResult.ResultException(ex);
         }
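For context, a minimal, self-contained sketch of the fetch-and-wrap pattern the second hunk modifies. FetchResult and FetchSketch are hypothetical stand-ins for Marginalia's HttpFetchResult and WarcRecorder (the real classes use SLF4J and a richer result hierarchy); the point is only to illustrate what widening the catch from IOException to Exception buys:

import java.io.InputStream;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

// Hypothetical, simplified stand-in for HttpFetchResult.
sealed interface FetchResult {
    record Ok(int statusCode, InputStream body) implements FetchResult {}
    record ResultException(Exception ex) implements FetchResult {}
}

class FetchSketch {
    private static final System.Logger logger = System.getLogger("FetchSketch");
    private final HttpClient client = HttpClient.newHttpClient();

    FetchResult fetch(URI requestUri) {
        HttpRequest request = HttpRequest.newBuilder(requestUri).GET().build();
        HttpResponse<InputStream> response;
        try {
            response = client.send(request, HttpResponse.BodyHandlers.ofInputStream());
        }
        // Catching Exception rather than IOException also traps unchecked
        // failures (and the checked InterruptedException thrown by send()),
        // so a bad URL or runtime error is logged and wrapped instead of
        // propagating out of the fetch path.
        catch (Exception ex) {
            logger.log(System.Logger.Level.WARNING,
                    "Failed to fetch URL {0}: {1}", requestUri, ex.getMessage());
            return new FetchResult.ResultException(ex);
        }
        return new FetchResult.Ok(response.statusCode(), response.body());
    }
}

With results wrapped this way, callers inspect the outcome instead of handling exceptions at every call site, and a malformed request or unexpected runtime failure surfaces as a logged warning rather than an unhandled exception on the crawler thread.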