Adding Cleanup

Enki 2025-05-03 01:27:26 -07:00
parent 7bd96b2240
commit 9670d04d68
4 changed files with 107 additions and 4 deletions

@@ -19,6 +19,7 @@ An automated content posting bot for Nostr networks. This tool allows you to sch
### Bot Profile Management Enhancements
- [ ] Create a consistent interface for profile images across both modals
- [ ] Better key management.
### Relay Management Improvements

@@ -89,14 +89,20 @@ func (m *Manager) AddRelay(url string, read, write bool) error {
}
// Connect to the relay with a longer timeout for slow connections
ctx, cancel := context.WithTimeout(context.Background(), 20*time.Second)
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
relay, err := nostr.RelayConnect(ctx, url)
if err != nil {
// Log the error but don't fail - we'll try other relays
m.logger.Warn("Failed to connect to relay, will try others",
zap.String("relay_url", url),
zap.Error(err))
return fmt.Errorf("failed to connect to relay %s: %w", url, err)
}
m.logger.Info("Successfully connected to relay", zap.String("relay_url", url))
// Store the relay
m.relays[url] = relay
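
For reference, the connect-with-timeout pattern above can be exercised on its own. A minimal sketch, assuming the github.com/nbd-wtf/go-nostr client (the nostr.RelayConnect call above matches its API) and a placeholder relay URL:

package main

import (
    "context"
    "log"
    "time"

    "github.com/nbd-wtf/go-nostr"
)

func main() {
    // Give slow relays up to 30 seconds to answer, mirroring the timeout above.
    ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
    defer cancel()

    // "wss://relay.example.com" is a placeholder, not a relay from the config above.
    relay, err := nostr.RelayConnect(ctx, "wss://relay.example.com")
    if err != nil {
        // A failed connection is logged; callers can move on to other relays.
        log.Printf("failed to connect to relay: %v", err)
        return
    }
    defer relay.Close()

    log.Printf("connected to %s", relay.URL)
}
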
@@ -144,6 +150,8 @@ func (m *Manager) PublishEvent(ctx context.Context, event *nostr.Event) ([]strin
"wss://freelay.sovbit.host",
"wss://wot.sovbit.host",
"wss://relay.nostr.band",
"wss://relay.damus.io",
"wss://relay.snort.social",
}
for _, relayURL := range defaultRelays {
@@ -189,8 +197,8 @@ func (m *Manager) PublishEvent(ctx context.Context, event *nostr.Event) ([]strin
return
}
// Create a new context with timeout
publishCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
// Create a new context with longer timeout for slow relays
publishCtx, cancel := context.WithTimeout(ctx, 45*time.Second)
defer cancel()
// Publish the event
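
The per-relay timeout above bounds each publish attempt independently. A minimal sketch of that fan-out shape, using a hypothetical publishFn callback rather than a concrete relay API:

package relays

import (
    "context"
    "sync"
    "time"
)

// publishToAll is a hypothetical helper illustrating the pattern above: publish
// to every relay concurrently and give each attempt its own deadline, so one
// slow relay cannot hold up the rest. It returns the relays that succeeded.
func publishToAll(ctx context.Context, urls []string,
    publishFn func(ctx context.Context, url string) error) []string {

    var (
        mu        sync.Mutex
        succeeded []string
        wg        sync.WaitGroup
    )
    for _, url := range urls {
        wg.Add(1)
        go func(url string) {
            defer wg.Done()

            // Longer per-relay timeout for slow relays, matching the 45s used above.
            publishCtx, cancel := context.WithTimeout(ctx, 45*time.Second)
            defer cancel()

            if err := publishFn(publishCtx, url); err != nil {
                return // failures are simply skipped in this sketch
            }

            mu.Lock()
            succeeded = append(succeeded, url)
            mu.Unlock()
        }(url)
    }
    wg.Wait()
    return succeeded
}
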

@@ -165,6 +165,9 @@ func (s *Scheduler) Start() error {
// Start the cron scheduler
s.cron.Start()
// Clean up content directories for bots that no longer exist in the database
go s.cleanupOrphanedContentDirectories()
return nil
}
@@ -289,7 +292,8 @@ func (s *Scheduler) ScheduleBot(bot *models.Bot) error {
var ownerPubkey string
err = s.db.Get(&ownerPubkey, "SELECT owner_pubkey FROM bots WHERE id = ?", bot.ID)
if err != nil {
s.logger.Error("Failed to get bot owner pubkey",
// Change from Error to Warning level since we have a fallback
s.logger.Warn("Could not find bot owner pubkey, will use default relays",
zap.Int64("bot_id", bot.ID),
zap.Error(err))
ownerPubkey = "" // Default to empty if not found
@@ -307,7 +311,15 @@ func (s *Scheduler) ScheduleBot(bot *models.Bot) error {
bot.Relays = append(bot.Relays, relay)
}
}
} else if err != nil {
s.logger.Warn("Failed to get combined relays, will use default relays",
zap.Int64("bot_id", bot.ID),
zap.Error(err))
}
} else {
s.logger.Info("Using default relays for posting",
zap.Int64("bot_id", bot.ID),
zap.String("bot_name", bot.Name))
}
// Rest of the function remains the same
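
The selection logic above ends up with three tiers: the bot's own relays, the owner's relays merged in with containsRelay-style deduplication, and the built-in defaults when nothing else is available. A simplified, hypothetical sketch of that ordering, using plain URL strings instead of models.Relay:

// chooseRelays is a simplified stand-in for the logic above: keep the bot's
// relays, merge in owner relays that are not already present, and fall back
// to the default list only when nothing else is configured.
func chooseRelays(botRelays, ownerRelays, defaults []string) []string {
    seen := make(map[string]bool)
    var out []string
    add := func(urls []string) {
        for _, u := range urls {
            if !seen[u] {
                seen[u] = true
                out = append(out, u)
            }
        }
    }
    add(botRelays)
    add(ownerRelays)
    if len(out) == 0 {
        add(defaults)
    }
    return out
}
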
@@ -491,4 +503,61 @@ func containsRelay(relays []*models.Relay, url string) bool {
}
}
return false
}
// cleanupOrphanedContentDirectories cleans up content directories for bots that don't exist in the database
func (s *Scheduler) cleanupOrphanedContentDirectories() {
    // Get all bot IDs from the database
    var botIDs []int64
    err := s.db.Select(&botIDs, "SELECT id FROM bots")
    if err != nil {
        s.logger.Error("Failed to get bot IDs from database", zap.Error(err))
        return
    }
    // Create a map for quick lookup
    validBotIDs := make(map[int64]bool)
    for _, id := range botIDs {
        validBotIDs[id] = true
    }
    // Read content directories
    files, err := filepath.Glob(filepath.Join(s.contentDir, "bot_*"))
    if err != nil {
        s.logger.Error("Failed to read content directories", zap.Error(err))
        return
    }
    // Check each directory
    for _, file := range files {
        dirName := filepath.Base(file)
        var botID int64
        _, err := fmt.Sscanf(dirName, "bot_%d", &botID)
        if err != nil {
            s.logger.Warn("Failed to parse bot ID from directory name",
                zap.String("directory", dirName),
                zap.Error(err))
            continue
        }
        // Check if the bot exists
        if !validBotIDs[botID] {
            s.logger.Warn("Found orphaned content directory for deleted bot",
                zap.Int64("bot_id", botID),
                zap.String("directory", file))
            // Rename the directory to mark it as orphaned
            orphanedDir := filepath.Join(s.contentDir, fmt.Sprintf("orphaned_%s", dirName))
            if err := utils.RenameDir(file, orphanedDir); err != nil {
                s.logger.Error("Failed to rename orphaned content directory",
                    zap.String("from", file),
                    zap.String("to", orphanedDir),
                    zap.Error(err))
            } else {
                s.logger.Info("Renamed orphaned content directory",
                    zap.String("from", file),
                    zap.String("to", orphanedDir))
            }
        }
    }
}
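
The cleanup depends entirely on the bot_<id> directory naming convention, and the orphaned_ prefix keeps the original name so nothing is deleted outright. A small sketch of that parsing and renaming convention, with a hypothetical path:

package main

import (
    "fmt"
    "path/filepath"
)

func main() {
    dir := "content/bot_42" // hypothetical content directory

    // Parse the numeric bot ID out of the directory name.
    var botID int64
    if _, err := fmt.Sscanf(filepath.Base(dir), "bot_%d", &botID); err != nil {
        fmt.Println("not a bot directory:", err)
        return
    }

    // An orphaned directory keeps its original name behind an "orphaned_" prefix,
    // so its contents can still be inspected or restored later.
    orphaned := filepath.Join(filepath.Dir(dir), "orphaned_"+filepath.Base(dir))
    fmt.Println(botID, orphaned) // 42 content/orphaned_bot_42
}
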

@@ -223,4 +223,29 @@ func GetSupportedVideoExtensions() []string {
// GetAllSupportedMediaExtensions returns all supported media extensions
func GetAllSupportedMediaExtensions() []string {
return append(GetSupportedImageExtensions(), GetSupportedVideoExtensions()...)
}
// RenameDir renames a directory from src to dst
func RenameDir(src, dst string) error {
    // First check if the source directory exists
    srcInfo, err := os.Stat(src)
    if err != nil {
        return fmt.Errorf("source directory error: %w", err)
    }
    if !srcInfo.IsDir() {
        return fmt.Errorf("source is not a directory: %s", src)
    }
    // Check if destination already exists
    if _, err := os.Stat(dst); err == nil {
        return fmt.Errorf("destination directory already exists: %s", dst)
    }
    // Create parent directory for destination if needed
    if err := EnsureDir(filepath.Dir(dst)); err != nil {
        return fmt.Errorf("failed to create parent directory: %w", err)
    }
    // Perform the rename
    return os.Rename(src, dst)
}
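
A hypothetical call site for RenameDir; the failure modes mirror the checks above (missing or non-directory source, destination already present, or a parent directory that could not be created):

// Move a deleted bot's content aside rather than removing it (paths are examples).
if err := utils.RenameDir("content/bot_7", "content/orphaned_bot_7"); err != nil {
    // RenameDir refuses to overwrite: if content/orphaned_bot_7 already exists,
    // the error names the destination and the source is left untouched.
    log.Printf("could not archive content directory: %v", err)
}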