#!/bin/bash

# NIP-29 Group Backup Script

# This script backs up all groups from a NIP-29 relay using the nak tool

# Usage: ./backup_nip29_groups.sh

set -e # Exit on any unhandled error (caveat: ignored inside 'if'/'&&'/'||' conditions)

# Function to display usage

# Print usage information and abort.
#
# Outputs: usage text on stderr (diagnostics do not belong on stdout)
# Returns: never — exits with status 1
show_usage() {
    # BUG FIX: the original usage line omitted the two required arguments,
    # printing just "Usage: $0 " which told the user nothing.
    echo "Usage: $0 <backup_folder> <relay_url>" >&2
    echo "Example: $0 ./groups_backup wss://relay.example.com" >&2
    exit 1
}

# Function to log messages

# Emit a single timestamped log line on stdout.
#
# Arguments: $1 - message text
log() {
    local stamp
    stamp=$(date '+%Y-%m-%d %H:%M:%S')
    printf '[%s] %s\n' "$stamp" "$1"
}

# --- Prerequisite checks -------------------------------------------------
# Both 'nak' (relay queries) and 'jq' (JSON parsing, used when extracting
# group IDs below) are hard requirements; fail fast if either is missing.
if ! command -v nak >/dev/null 2>&1; then
    echo "Error: 'nak' tool is not installed or not in PATH" >&2
    echo "Please install it from: https://github.com/fiatjaf/nak" >&2
    exit 1
fi

# BUG FIX: jq is used extensively later but was never checked, so a missing
# jq previously failed mid-run with its errors hidden by 2>/dev/null.
if ! command -v jq >/dev/null 2>&1; then
    echo "Error: 'jq' is not installed or not in PATH" >&2
    exit 1
fi

# Check arguments
if [ $# -ne 2 ]; then
    show_usage
fi

BACKUP_FOLDER="$1"
RELAY_URL="$2"

# Create backup folder if it doesn't exist
mkdir -p "$BACKUP_FOLDER"

log "Starting NIP-29 group backup from relay: $RELAY_URL"
log "Backup folder: $BACKUP_FOLDER"

# Step 1: Probe the relay — can we connect, and does it look like NIP-29?
log "Testing relay connection and NIP-29 support..."

probe_file=$(mktemp)

# A failed/empty probe is non-fatal: the relay may simply host no groups yet.
if ! timeout 10 nak req -k 39000 "$RELAY_URL" > "$probe_file" 2>/dev/null; then
    log "Warning: Could not connect to relay or no kind 39000 events found"
    log "This might not be a NIP-29 relay or it has no groups"
fi

if [ ! -s "$probe_file" ]; then
    log "No group metadata events (kind 39000) found. Checking for other group kinds..."
fi

rm -f "$probe_file"

# Step 1: Discover all groups by fetching group metadata events (kinds 39000-39009)
log "Discovering groups on relay..."

TEMP_GROUPS_FILE=$(mktemp)

# Query for group metadata events (NIP-29 kinds 39000-39009).
# Per-kind failures are logged and skipped; they must not abort the run.
for kind in {39000..39009}; do
    log "Querying for group metadata events of kind $kind..."
    if ! timeout 30 nak req -k "$kind" "$RELAY_URL" >> "$TEMP_GROUPS_FILE" 2>/dev/null; then
        log " Timeout or error querying kind $kind, continuing..."
    fi
done

# Also check for group control events (kinds 9000-9030) to find more groups
log "Checking for group control events..."
for kind in {9000..9030}; do
    timeout 30 nak req -k "$kind" "$RELAY_URL" >> "$TEMP_GROUPS_FILE" 2>/dev/null || true
done

# Extract unique group IDs from the collected events. NIP-29 groups are
# referenced via 'h' tags; addressable metadata events may also carry the
# identifier in a 'd' tag, so both are accepted.
log "Extracting group IDs..."

GROUP_IDS_FILE=$(mktemp)

if [ -s "$TEMP_GROUPS_FILE" ]; then
    # One jq pass handles both tag styles, reading the file directly
    # (previously: two separate 'cat | jq' pipelines plus a third redundant
    # sort — same output, three times the work).
    jq -r '
        select(.tags != null) |
        .tags[] |
        select((.[0] == "h" or .[0] == "d") and length > 1) |
        .[1]
    ' "$TEMP_GROUPS_FILE" 2>/dev/null | sort -u > "$GROUP_IDS_FILE"
else
    log "No group metadata events found on this relay"
    rm -f "$TEMP_GROUPS_FILE"
    exit 0
fi

# Read all group IDs into an array to avoid stdin consumption issues
# (nak invocations inside a 'while read' loop would eat the loop's stdin).
mapfile -t GROUP_IDS < "$GROUP_IDS_FILE"

GROUP_COUNT=${#GROUP_IDS[@]}
log "Found $GROUP_COUNT unique group identifiers"

if [ "$GROUP_COUNT" -eq 0 ]; then
    log "No groups found on this relay"
    # BUG FIX: this path used to call 'cleanup', but that function is only
    # defined near the end of the script — bash resolves function names at
    # call time, so this failed with "command not found" and, under 'set -e',
    # turned the benign "no groups" case into an error exit. Remove the temp
    # files directly instead.
    rm -f "$TEMP_GROUPS_FILE" "$GROUP_IDS_FILE" 2>/dev/null || true
    exit 0
fi

# Show found group IDs for debugging
log "Group IDs found:"
for group_id in "${GROUP_IDS[@]}"; do
    log " - $group_id"
done

# Map a group ID to a filename-safe form: each character from the unsafe set
# { / : space < > " | ? * } becomes '_' (single parameter expansion instead of
# the previous chain of nine 'tr' processes; same character set, same result).
sanitize_group_id() {
    local id=$1
    printf '%s\n' "${id//[\/:<>\"|?* ]/_}"
}

# Step 2: For each group, backup all related events
for group_id in "${GROUP_IDS[@]}"; do
    if [ -z "$group_id" ]; then
        continue
    fi

    log "Backing up group: $group_id"

    SAFE_GROUP_ID=$(sanitize_group_id "$group_id")
    GROUP_FILE="$BACKUP_FOLDER/group_${SAFE_GROUP_ID}.jsonl"

    # Collect this group's raw events here, then post-process once at the end.
    TEMP_GROUP_FILE=$(mktemp)

    # Backup group control events (kinds 9000-9030); per-kind failures are
    # deliberately ignored so one flaky query cannot abort the whole backup.
    log " Fetching group control events..."
    for kind in {9000..9030}; do
        timeout 30 nak req -k "$kind" --tag h="$group_id" "$RELAY_URL" 2>/dev/null >> "$TEMP_GROUP_FILE" || true
    done

    # Backup group metadata events (kinds 39000-39009)
    log " Fetching group metadata events..."
    for kind in {39000..39009}; do
        timeout 30 nak req -k "$kind" --tag h="$group_id" "$RELAY_URL" 2>/dev/null >> "$TEMP_GROUP_FILE" || true
        # Also try with 'd' tag in case this group uses that
        timeout 30 nak req -k "$kind" --tag d="$group_id" "$RELAY_URL" 2>/dev/null >> "$TEMP_GROUP_FILE" || true
    done

    # Backup regular group messages (kinds 11, 12 - NIP-29 group messages)
    log " Fetching group messages..."
    timeout 30 nak req -k 11 --tag h="$group_id" "$RELAY_URL" 2>/dev/null >> "$TEMP_GROUP_FILE" || true
    timeout 30 nak req -k 12 --tag h="$group_id" "$RELAY_URL" 2>/dev/null >> "$TEMP_GROUP_FILE" || true

    # Try a broader search for any events tagged with this group
    log " Fetching other group-related events..."
    timeout 60 nak req --tag h="$group_id" "$RELAY_URL" 2>/dev/null >> "$TEMP_GROUP_FILE" || true

    if [ -s "$TEMP_GROUP_FILE" ]; then
        # BUG FIX: the old pipeline used plain 'sort -u' which only dropped
        # byte-identical lines — two serializations of the same event survived.
        # 'unique_by(.id)' dedupes by event id as intended, then events are
        # ordered chronologically by created_at.
        jq -c 'select(.id != null)' "$TEMP_GROUP_FILE" 2>/dev/null |
            jq -cs 'unique_by(.id) | sort_by(.created_at // 0) | .[]' 2>/dev/null > "$GROUP_FILE"

        EVENT_COUNT=$(wc -l < "$GROUP_FILE" 2>/dev/null || echo "0")
        log " Saved $EVENT_COUNT events for group $group_id"
    else
        log " No events found for group $group_id"
        # Create an empty file so every discovered group leaves an artifact
        touch "$GROUP_FILE"
    fi

    # Cleanup temp file
    rm -f "$TEMP_GROUP_FILE"
done

# Remove the discovery-phase temp files. Safe to call repeatedly and when
# the files are already gone.
cleanup() {
    local f
    for f in "$TEMP_GROUPS_FILE" "$GROUP_IDS_FILE"; do
        rm -f "$f" 2>/dev/null || true
    done
}

cleanup

# Count total files and events
TOTAL_FILES=$(find "$BACKUP_FOLDER" -name "group_*.jsonl" -type f | wc -l)

TOTAL_EVENTS=0
if [ "$TOTAL_FILES" -gt 0 ]; then
    # BUG FIX: the previous 'wc -l {} + | tail -n1' undercounted whenever find
    # had to invoke wc in more than one batch (very many files): each batch
    # prints its own "total" line and only the last one survived the tail.
    # Concatenating every file into one stream and counting once is exact.
    # (awk strips the leading padding BSD wc emits when reading stdin.)
    TOTAL_EVENTS=$(find "$BACKUP_FOLDER" -name "group_*.jsonl" -type f -exec cat {} + | wc -l | awk '{print $1}')
fi

log "Backup completed successfully!"
log "Total group files: $TOTAL_FILES"
log "Total events: $TOTAL_EVENTS"
log "Backup files saved in: $BACKUP_FOLDER"

# NOTE(review): removed stray web-page footer text that was accidentally
# appended when this script was copied from a web page ("Reply to this note",
# "Please Login to reply.", "Discussion", "No replies yet."). Those lines are
# not shell code and would have executed as commands, failing at runtime.