(control) New actions view, re-arrange navigation menu

This commit is contained in:
Viktor Lofgren 2023-08-05 14:44:55 +02:00
parent 715d61dfea
commit be444f9172
9 changed files with 592 additions and 48 deletions

View File

@ -58,6 +58,7 @@ public class ControlService extends Service {
ControlFileStorageService controlFileStorageService,
ApiKeyService apiKeyService,
DomainComplaintService domainComplaintService,
ControlActionsService controlActionsService,
MqPersistence persistence
) throws IOException {
@ -88,6 +89,8 @@ public class ControlService extends Service {
var newMessageRenderer = rendererFactory.renderer("control/new-message");
var viewMessageRenderer = rendererFactory.renderer("control/view-message");
var actionsViewRenderer = rendererFactory.renderer("control/actions");
this.controlActorService = controlActorService;
this.staticResources = staticResources;
@ -101,28 +104,26 @@ public class ControlService extends Service {
Spark.get("/public/", (req, rsp) -> indexRenderer.render(Map.of()));
Spark.get("/public/actions", (rq,rsp) -> new Object() , actionsViewRenderer::render);
Spark.get("/public/services", this::servicesModel, servicesRenderer::render);
Spark.get("/public/services/:id", this::serviceModel, serviceByIdRenderer::render);
Spark.get("/public/messages/:id", this::existingMessageModel, gson::toJson);
Spark.get("/public/actors", this::processesModel, actorsRenderer::render);
Spark.get("/public/actors/:fsm", this::actorDetailsModel, actorDetailsRenderer::render);
Spark.get("/public/storage", this::storageModel, storageRenderer::render);
Spark.get("/public/storage/specs", this::storageModelSpecs, storageSpecsRenderer::render);
Spark.get("/public/storage/crawls", this::storageModelCrawls, storageCrawlsRenderer::render);
Spark.get("/public/storage/processed", this::storageModelProcessed, storageProcessedRenderer::render);
Spark.get("/public/storage/:id", this::storageDetailsModel, storageDetailsRenderer::render);
Spark.get("/public/storage/:id/file", controlFileStorageService::downloadFileFromStorage);
final HtmlRedirect redirectToServices = new HtmlRedirect("/services");
final HtmlRedirect redirectToProcesses = new HtmlRedirect("/actors");
final HtmlRedirect redirectToActors = new HtmlRedirect("/actors");
final HtmlRedirect redirectToApiKeys = new HtmlRedirect("/api-keys");
final HtmlRedirect redirectToStorage = new HtmlRedirect("/storage");
final HtmlRedirect redirectToComplaints = new HtmlRedirect("/complaints");
final HtmlRedirect redirectToMessageQueue = new HtmlRedirect("/message-queue");
Spark.post("/public/fsms/:fsm/start", controlActorService::startFsm, redirectToProcesses);
Spark.post("/public/fsms/:fsm/stop", controlActorService::stopFsm, redirectToProcesses);
// FSMs
Spark.post("/public/fsms/:fsm/start", controlActorService::startFsm, redirectToActors);
Spark.post("/public/fsms/:fsm/stop", controlActorService::stopFsm, redirectToActors);
// Message Queue
Spark.get("/public/message-queue", this::messageQueueModel, messageQueueRenderer::render);
Spark.post("/public/message-queue/", (rq, rsp) -> {
@ -156,14 +157,26 @@ public class ControlService extends Service {
return "";
}, redirectToMessageQueue);
Spark.post("/public/storage/:fid/crawl", controlActorService::triggerCrawling, redirectToProcesses);
Spark.post("/public/storage/:fid/recrawl", controlActorService::triggerRecrawling, redirectToProcesses);
Spark.post("/public/storage/:fid/process", controlActorService::triggerProcessing, redirectToProcesses);
Spark.post("/public/storage/:fid/load", controlActorService::loadProcessedData, redirectToProcesses);
// Storage
Spark.get("/public/storage", this::storageModel, storageRenderer::render);
Spark.get("/public/storage/specs", this::storageModelSpecs, storageSpecsRenderer::render);
Spark.get("/public/storage/crawls", this::storageModelCrawls, storageCrawlsRenderer::render);
Spark.get("/public/storage/processed", this::storageModelProcessed, storageProcessedRenderer::render);
Spark.get("/public/storage/:id", this::storageDetailsModel, storageDetailsRenderer::render);
Spark.get("/public/storage/:id/file", controlFileStorageService::downloadFileFromStorage);
// Storage Actions
Spark.post("/public/storage/:fid/crawl", controlActorService::triggerCrawling, redirectToActors);
Spark.post("/public/storage/:fid/recrawl", controlActorService::triggerRecrawling, redirectToActors);
Spark.post("/public/storage/:fid/process", controlActorService::triggerProcessing, redirectToActors);
Spark.post("/public/storage/:fid/load", controlActorService::loadProcessedData, redirectToActors);
Spark.post("/public/storage/specs", controlActorService::createCrawlSpecification, redirectToStorage);
Spark.post("/public/storage/:fid/delete", controlFileStorageService::flagFileForDeletionRequest, redirectToStorage);
// API Keys
Spark.get("/public/api-keys", this::apiKeysModel, apiKeysRenderer::render);
Spark.post("/public/api-keys", this::createApiKey, redirectToApiKeys);
Spark.delete("/public/api-keys/:key", this::deleteApiKey, redirectToApiKeys);
@ -173,6 +186,16 @@ public class ControlService extends Service {
Spark.get("/public/complaints", this::complaintsModel, domainComplaintsRenderer::render);
Spark.post("/public/complaints/:domain", this::reviewComplaint, redirectToComplaints);
// Actions
Spark.post("/public/actions/calculate-adjacencies", controlActionsService::calculateAdjacencies, redirectToActors);
Spark.post("/public/actions/repartition-index", controlActionsService::triggerRepartition, redirectToActors);
Spark.post("/public/actions/reconvert-index", controlActionsService::triggerReconversion, redirectToActors);
Spark.post("/public/actions/trigger-data-exports", controlActionsService::triggerDataExports, redirectToActors);
Spark.post("/public/actions/flush-search-caches", controlActionsService::flushSearchCaches, redirectToActors);
Spark.post("/public/actions/flush-api-caches", controlActionsService::flushApiCaches, redirectToActors);
Spark.post("/public/actions/flush-links-database", controlActionsService::flushLinkDatabase, redirectToActors);
Spark.get("/public/:resource", this::serveStatic);
monitors.subscribe(this::logMonitorStateChange);

View File

@ -45,7 +45,9 @@ public class ControlActors {
ProcessLivenessMonitorActor processMonitorFSM,
FileStorageMonitorActor fileStorageMonitorActor,
TriggerAdjacencyCalculationActor triggerAdjacencyCalculationActor,
CrawlJobExtractorActor crawlJobExtractorActor
CrawlJobExtractorActor crawlJobExtractorActor,
ExportDataActor exportDataActor,
FlushLinkDatabase flushLinkDatabase
) {
this.messageQueueFactory = messageQueueFactory;
this.eventLog = baseServiceParams.eventLog;
@ -62,6 +64,8 @@ public class ControlActors {
register(Actor.FILE_STORAGE_MONITOR, fileStorageMonitorActor);
register(Actor.ADJACENCY_CALCULATION, triggerAdjacencyCalculationActor);
register(Actor.CRAWL_JOB_EXTRACTOR, crawlJobExtractorActor);
register(Actor.EXPORT_DATA, exportDataActor);
register(Actor.FLUSH_LINK_DATABASE, flushLinkDatabase);
}
private void register(Actor process, AbstractStateGraph graph) {

View File

@ -0,0 +1,192 @@
package nu.marginalia.control.actor.task;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.zaxxer.hikari.HikariDataSource;
import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;
import lombok.With;
import nu.marginalia.db.storage.FileStorageService;
import nu.marginalia.db.storage.model.FileStorageId;
import nu.marginalia.db.storage.model.FileStorageType;
import nu.marginalia.mqsm.StateFactory;
import nu.marginalia.mqsm.graph.AbstractStateGraph;
import nu.marginalia.mqsm.graph.GraphState;
import nu.marginalia.mqsm.graph.ResumeBehavior;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedWriter;
import java.io.OutputStreamWriter;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.zip.GZIPOutputStream;
@Singleton
public class ExportDataActor extends AbstractStateGraph {
private static final String blacklistFilename = "blacklist.csv.gz";
private static final String domainsFilename = "domains.csv.gz";
private static final String linkGraphFilename = "linkgraph.csv.gz";
// STATES
public static final String INITIAL = "INITIAL";
public static final String EXPORT_DOMAINS = "EXPORT-DOMAINS";
public static final String EXPORT_BLACKLIST = "EXPORT-BLACKLIST";
public static final String EXPORT_LINK_GRAPH = "EXPORT-LINK-GRAPH";
public static final String END = "END";
private final FileStorageService storageService;
private final HikariDataSource dataSource;
private final Logger logger = LoggerFactory.getLogger(getClass());
@AllArgsConstructor @With @NoArgsConstructor
public static class Message {
public FileStorageId storageId = null;
};
@Inject
public ExportDataActor(StateFactory stateFactory,
FileStorageService storageService,
HikariDataSource dataSource)
{
super(stateFactory);
this.storageService = storageService;
this.dataSource = dataSource;
}
@GraphState(name = INITIAL,
next = EXPORT_BLACKLIST,
description = """
Find EXPORT storage area, then transition to EXPORT-BLACKLIST.
""")
public Message init(Integer i) throws Exception {
var storage = storageService.getStorageByType(FileStorageType.EXPORT);
if (storage == null) error("Bad storage id");
return new Message().withStorageId(storage.id());
}
@GraphState(name = EXPORT_BLACKLIST,
next = EXPORT_DOMAINS,
resume = ResumeBehavior.ERROR,
description = """
Export the blacklist from the database to the EXPORT storage area.
"""
)
public Message exportBlacklist(Message message) throws Exception {
var storage = storageService.getStorage(message.storageId);
var tmpFile = Files.createTempFile(storage.asPath(), "export", ".csv.gz",
PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rw-r--r--")));
try (var bw = new BufferedWriter(new OutputStreamWriter(new GZIPOutputStream(Files.newOutputStream(tmpFile, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING))));
var conn = dataSource.getConnection();
var stmt = conn.prepareStatement("SELECT URL_DOMAIN FROM EC_DOMAIN_BLACKLIST");
)
{
stmt.setFetchSize(1000);
var rs = stmt.executeQuery();
while (rs.next()) {
bw.write(rs.getString(1));
bw.write("\n");
}
Files.move(tmpFile, storage.asPath().resolve(blacklistFilename), StandardCopyOption.ATOMIC_MOVE, StandardCopyOption.REPLACE_EXISTING);
}
catch (Exception ex) {
logger.error("Failed to export blacklist", ex);
error("Failed to export blacklist");
}
finally {
Files.deleteIfExists(tmpFile);
}
return message;
}
@GraphState(
name = EXPORT_DOMAINS,
next = EXPORT_LINK_GRAPH,
resume = ResumeBehavior.RETRY,
description = """
Export known domains to the EXPORT storage area.
"""
)
public Message exportDomains(Message message) throws Exception {
var storage = storageService.getStorage(message.storageId);
var tmpFile = Files.createTempFile(storage.asPath(), "export", ".csv.gz",
PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rw-r--r--")));
try (var bw = new BufferedWriter(new OutputStreamWriter(new GZIPOutputStream(Files.newOutputStream(tmpFile, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING))));
var conn = dataSource.getConnection();
var stmt = conn.prepareStatement("SELECT DOMAIN_NAME, ID, INDEXED, STATE FROM EC_DOMAIN");
)
{
stmt.setFetchSize(1000);
var rs = stmt.executeQuery();
while (rs.next()) {
bw.write(rs.getString("DOMAIN_NAME"));
bw.write(",");
bw.write(rs.getString("ID"));
bw.write(",");
bw.write(rs.getString("INDEXED"));
bw.write(",");
bw.write(rs.getString("STATE"));
bw.write("\n");
}
Files.move(tmpFile, storage.asPath().resolve(domainsFilename), StandardCopyOption.ATOMIC_MOVE, StandardCopyOption.REPLACE_EXISTING);
}
catch (Exception ex) {
logger.error("Failed to export domains", ex);
error("Failed to export domains");
}
finally {
Files.deleteIfExists(tmpFile);
}
return message;
}
@GraphState(
name = EXPORT_LINK_GRAPH,
next = END,
resume = ResumeBehavior.RETRY,
description = """
Export known domains to the EXPORT storage area.
"""
)
public Message exportLinkGraph(Message message) throws Exception {
var storage = storageService.getStorage(message.storageId);
var tmpFile = Files.createTempFile(storage.asPath(), "export", ".csv.gz",
PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rw-r--r--")));
try (var bw = new BufferedWriter(new OutputStreamWriter(new GZIPOutputStream(Files.newOutputStream(tmpFile, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING))));
var conn = dataSource.getConnection();
var stmt = conn.prepareStatement("SELECT SOURCE_DOMAIN_ID, DEST_DOMAIN_ID FROM EC_DOMAIN_LINK");
)
{
stmt.setFetchSize(1000);
var rs = stmt.executeQuery();
while (rs.next()) {
bw.write(rs.getString("SOURCE_DOMAIN_ID"));
bw.write(",");
bw.write(rs.getString("DEST_DOMAIN_ID"));
bw.write("\n");
}
Files.move(tmpFile, storage.asPath().resolve(linkGraphFilename), StandardCopyOption.ATOMIC_MOVE, StandardCopyOption.REPLACE_EXISTING);
}
catch (Exception ex) {
logger.error("Failed to export link graph", ex);
error("Failed to export link graph");
}
finally {
Files.deleteIfExists(tmpFile);
}
return message;
}
}

View File

@ -0,0 +1,85 @@
package nu.marginalia.control.actor.task;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.zaxxer.hikari.HikariDataSource;
import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;
import lombok.With;
import nu.marginalia.db.storage.model.FileStorageId;
import nu.marginalia.mqsm.StateFactory;
import nu.marginalia.mqsm.graph.AbstractStateGraph;
import nu.marginalia.mqsm.graph.GraphState;
import nu.marginalia.mqsm.graph.ResumeBehavior;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedWriter;
import java.io.OutputStreamWriter;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.PosixFilePermissions;
import java.sql.SQLException;
import java.util.zip.GZIPOutputStream;
@Singleton
public class FlushLinkDatabase extends AbstractStateGraph {

    // STATES
    public static final String INITIAL = "INITIAL";
    public static final String FLUSH_DATABASE = "FLUSH_DATABASE";
    public static final String END = "END";

    private final HikariDataSource dataSource;
    private final Logger logger = LoggerFactory.getLogger(getClass());

    /** Message passed between states.  The storageId field is currently unused
     *  by this actor. */
    @AllArgsConstructor @With @NoArgsConstructor
    public static class Message {
        public FileStorageId storageId = null;
    };

    @Inject
    public FlushLinkDatabase(StateFactory stateFactory,
                             HikariDataSource dataSource)
    {
        super(stateFactory);
        this.dataSource = dataSource;
    }

    @GraphState(name = INITIAL,
                next = FLUSH_DATABASE,
                description = """
                    Initial stage
                    """)
    public void init(Integer i) throws Exception {
    }

    // NOTE(review): the method name is a copy/paste leftover from ExportDataActor;
    // state dispatch is by the @GraphState name, so the name is kept for compatibility.
    @GraphState(name = FLUSH_DATABASE,
                next = END,
                resume = ResumeBehavior.ERROR,
                description = """
                    Truncate the domain and link tables.
                    """
    )
    public void exportBlacklist() throws Exception {
        try (var conn = dataSource.getConnection();
             var stmt = conn.createStatement())
        {
            stmt.executeUpdate("SET FOREIGN_KEY_CHECKS = 0");
            try {
                stmt.executeUpdate("TRUNCATE TABLE EC_PAGE_DATA");
                stmt.executeUpdate("TRUNCATE TABLE EC_URL");
                stmt.executeUpdate("TRUNCATE TABLE EC_DOMAIN_LINK");
                stmt.executeUpdate("TRUNCATE TABLE DOMAIN_METADATA");
            }
            finally {
                // FOREIGN_KEY_CHECKS is session-scoped, and this connection is
                // returned to the Hikari pool; it must be re-enabled even if one
                // of the TRUNCATEs throws, or later borrowers of the connection
                // would silently run without FK enforcement.
                stmt.executeUpdate("SET FOREIGN_KEY_CHECKS = 1");
            }
        }
        catch (SQLException ex) {
            logger.error("Failed to truncate tables", ex);
            error("Failed to truncate tables");
        }
    }
}

View File

@ -11,8 +11,9 @@ public enum Actor {
PROCESS_LIVENESS_MONITOR,
FILE_STORAGE_MONITOR,
ADJACENCY_CALCULATION,
CRAWL_JOB_EXTRACTOR
;
CRAWL_JOB_EXTRACTOR,
EXPORT_DATA,
FLUSH_LINK_DATABASE;
public String id() {

View File

@ -0,0 +1,111 @@
package nu.marginalia.control.svc;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import nu.marginalia.control.actor.ControlActors;
import nu.marginalia.control.model.Actor;
import nu.marginalia.index.client.IndexClient;
import nu.marginalia.index.client.IndexMqEndpoints;
import nu.marginalia.mq.MessageQueueFactory;
import nu.marginalia.mq.outbox.MqOutbox;
import nu.marginalia.search.client.SearchClient;
import nu.marginalia.search.client.SearchMqEndpoints;
import nu.marginalia.service.control.ServiceEventLog;
import nu.marginalia.service.id.ServiceId;
import nu.marginalia.service.server.BaseServiceParams;
import spark.Request;
import spark.Response;
import spark.Spark;
import java.util.UUID;
@Singleton
public class ControlActionsService {
private final ControlActors actors;
private final SearchClient searchClient;
private final IndexClient indexClient;
private final MqOutbox apiOutbox;
private final ServiceEventLog eventLog;
@Inject
public ControlActionsService(ControlActors actors,
SearchClient searchClient,
IndexClient indexClient,
MessageQueueFactory mqFactory,
ServiceEventLog eventLog) {
this.actors = actors;
this.searchClient = searchClient;
this.indexClient = indexClient;
this.apiOutbox = createApiOutbox(mqFactory);
this.eventLog = eventLog;
}
/** This is a hack to get around the fact that the API service is not a core service
* and lacks a proper internal API
*/
private MqOutbox createApiOutbox(MessageQueueFactory mqFactory) {
String inboxName = ServiceId.Api.name + ":" + "0";
String outboxName = System.getProperty("service-name", UUID.randomUUID().toString());
return mqFactory.createOutbox(inboxName, outboxName, UUID.randomUUID());
}
public Object calculateAdjacencies(Request request, Response response) throws Exception {
eventLog.logEvent("USER-ACTION", "CALCULATE-ADJACENCIES");
actors.start(Actor.ADJACENCY_CALCULATION);
return "";
}
public Object triggerDataExports(Request request, Response response) throws Exception {
eventLog.logEvent("USER-ACTION", "EXPORT-DATA");
actors.start(Actor.EXPORT_DATA);
return "";
}
public Object flushSearchCaches(Request request, Response response) throws Exception {
eventLog.logEvent("USER-ACTION", "FLUSH-SEARCH-CACHES");
searchClient.outbox().sendNotice(SearchMqEndpoints.FLUSH_CACHES, "");
return "";
}
public Object flushApiCaches(Request request, Response response) throws Exception {
eventLog.logEvent("USER-ACTION", "FLUSH-API-CACHES");
apiOutbox.sendNotice("FLUSH_CACHES", "");
return "";
}
public Object flushLinkDatabase(Request request, Response response) throws Exception {
String footgunLicense = request.queryParams("footgun-license");
if (!"YES".equals(footgunLicense)) {
Spark.halt(403);
return "You must agree to the footgun license to flush the link database";
}
eventLog.logEvent("USER-ACTION", "FLUSH-LINK-DATABASE");
actors.start(Actor.FLUSH_LINK_DATABASE);
return "";
}
public Object triggerRepartition(Request request, Response response) throws Exception {
indexClient.outbox().sendAsync(IndexMqEndpoints.INDEX_REPARTITION, "");
return null;
}
public Object triggerReconversion(Request request, Response response) throws Exception {
indexClient.outbox().sendAsync(IndexMqEndpoints.INDEX_REINDEX, "");
return null;
}
}

View File

@ -0,0 +1,103 @@
<!DOCTYPE html>
<html>
<head>
    <title>Control Service</title>
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <link rel="stylesheet" href="/style.css" />
</head>
<body>
{{> control/partials/nav}}
<section>
    <h1>Actions</h1>
    <table style="max-width: 80ch">
        <tr>
            <th>Action</th><th>Trigger</th>
        </tr>
        <tr>
            <td><b>Trigger Adjacency Calculation</b><p>
                This will trigger a recalculation of website similarities, which affects
                the rankings calculations.
            </td>
            <td>
                <form method="post" action="/actions/calculate-adjacencies" onsubmit="return confirm('Confirm adjacency recalculation')">
                    <input type="submit" value="Trigger Calculations">
                </form>
            </td>
        </tr>
        <tr>
            <td><b>Repartition Index</b><p>
                This will recalculate the rankings and search sets for the index.
            </td>
            <td>
                <form method="post" action="/actions/repartition-index" onsubmit="return confirm('Confirm repartition')">
                    <input type="submit" value="Trigger Repartitioning">
                </form>
            </td>
        </tr>
        <tr>
            <td><b>Reconvert Index</b><p>
                This will reconstruct the index from the index journal.
            </td>
            <td>
                <form method="post" action="/actions/reconvert-index" onsubmit="return confirm('Confirm reconversion')">
                    <input type="submit" value="Trigger Reconversion">
                </form>
            </td>
        </tr>
        <tr>
            <td><b>Flush <tt>search-service</tt> Caches</b><p>
                This will instruct the search-service to flush its caches,
                getting rid of any stale data.  This may rarely be necessary after
                reloading the index.
            </td>
            <td>
                <form method="post" action="/actions/flush-search-caches" onsubmit="return confirm('Confirm flushing search caches')">
                    <input type="submit" value="Flush Search">
                </form>
            </td>
        </tr>
        <tr>
            <td><b>Flush <tt>api-service</tt> Caches</b><p>
                This will instruct the api-service to flush its caches,
                getting rid of any stale data.  This will be necessary after
                changes to the API licenses directly through the database.
            </td>
            <td>
                <!-- Was /actions/flush-index-caches, which has no registered route;
                     the controller registers /public/actions/flush-api-caches -->
                <form method="post" action="/actions/flush-api-caches" onsubmit="return confirm('Confirm flushing api caches')">
                    <input type="submit" value="Flush API">
                </form>
            </td>
        </tr>
        <tr>
            <td><b>Trigger Data Exports</b><p>
                This exports the data from the database into a set of CSV files
            </td>
            <td>
                <form method="post" action="/actions/trigger-data-exports" onsubmit="return confirm('Confirm triggering of exports')">
                    <input type="submit" value="Export Data">
                </form>
            </td>
        </tr>
        <tr>
            <th colspan="2">
                WARNING -- Destructive Actions Below This Line
            </th>
        </tr>
        <tr>
            <td><b>Flush Links Database.</b><p>
                <span style="color:red">This will drop all known URLs and domain links.</span><br>
                This action is not reversible.
            </td>
            <td>
                <form method="post" action="/actions/flush-links-database" onsubmit="return confirm('Last chance, you are about to flush the link database')">
                    <label for="footgun-license">Type exactly "YES" to confirm</label><br>
                    <input id="footgun-license" name="footgun-license" value="NO">
                    <br><br>
                    <input type="submit" value="TRUNCATE TABLE ...">
                </form>
            </td>
        </tr>
    </table>
</section>
</body>
</html>

View File

@ -1,12 +1,16 @@
<nav>
<ul>
<li><a href="/">Overview</a></li>
<li><a href="/services">Services</a></li>
<li><a href="/actors">Actors</a></li>
<li><a href="/message-queue">Message Queue</a></li>
<li><a href="/storage">Storage</a></li>
<li>---</li>
<li><a href="/api-keys">API Keys</a></li>
<li><a href="/blacklist">Blacklist</a></li>
<li><a href="/complaints">Complaints</a></li>
<li>---</li>
<li><a href="/actions">Actions</a></li>
<li><a href="/storage">Storage</a></li>
<li>---</li>
<li><a href="/services">Services</a></li>
<li><a href="/actors">Actors</a></li>
<li><a href="/message-queue">Message Queue</a></li>
</ul>
</nav>

View File

@ -45,33 +45,54 @@
{{/if}}
<h2>Actions</h2>
{{#with storage.self}}
{{#if isCrawlable}}
<form method="post" action="/storage/{{storage.id}}/crawl" onsubmit="return confirm('Confirm crawling of {{storage.path}}')">
Perform a full re-crawl of this data: <button type="submit">Crawl</button> <br>
</form>
{{/if}}
{{#if isLoadable}}
<form method="post" action="/storage/{{storage.id}}/load" onsubmit="return confirm('Confirm loading of {{storage.path}}')">
Load this data into index: <button type="submit">Load</button> <br>
</form>
{{/if}}
{{#if isConvertible}}
<form method="post" action="/storage/{{storage.id}}/process" onsubmit="return confirm('Confirm processing of {{storage.path}}')">
Process and load this data into index: <button type="submit">Process</button> <br>
</form>
{{/if}}
{{#if isRecrawlable}}
<form method="post" action="/storage/{{storage.id}}/recrawl" onsubmit="return confirm('Confirm re-crawling of {{storage.path}}')">
Perform a re-crawl of this data: <button type="submit">Recrawl</button><br>
</form>
{{/if}}
{{#if isDeletable}}
<form method="post" action="/storage/{{storage.id}}/delete" onsubmit="return confirm('Confirm deletion of {{storage.path}}')">
Delete this data: <button type="submit">Delete</button><br>
</form>
{{/if}}
{{/with}}
<table>
<tr>
<th>Description</th>
<th>Trigger</th>
</tr>
{{#with storage.self}}
{{#if isCrawlable}}
<form method="post" action="/storage/{{storage.id}}/crawl" onsubmit="return confirm('Confirm crawling of {{storage.path}}')">
<tr>
<td>Perform a full re-crawl of this data</td>
<td><button type="submit">Crawl</button></td>
</tr>
</form>
{{/if}}
{{#if isLoadable}}
<form method="post" action="/storage/{{storage.id}}/load" onsubmit="return confirm('Confirm loading of {{storage.path}}')">
<tr>
<td>Load this data into index</td>
<td><button type="submit">Load</button></td>
</tr>
</form>
{{/if}}
{{#if isConvertible}}
<form method="post" action="/storage/{{storage.id}}/process" onsubmit="return confirm('Confirm processing of {{storage.path}}')">
<tr>
<td>Process and load this data into index</td>
<td><button type="submit">Process</button></td>
</tr>
</form>
{{/if}}
{{#if isRecrawlable}}
<form method="post" action="/storage/{{storage.id}}/recrawl" onsubmit="return confirm('Confirm re-crawling of {{storage.path}}')">
<tr>
<td>Perform a re-crawl of this data</td>
<td><button type="submit">Recrawl</button></td>
</tr>
</form>
{{/if}}
{{#if isDeletable}}
<form method="post" action="/storage/{{storage.id}}/delete" onsubmit="return confirm('Confirm deletion of {{storage.path}}')">
<tr>
<td>Delete this data</td>
<td><button type="submit">Delete</button></td>
</tr>
</form>
{{/if}}
{{/with}}
</table>
{{#if storage.related}}
<h2>Related</h2>
<table>