Compare commits
2 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2d93c2f819 | ||
|
|
ce7f7ad11b |
25
README.md
25
README.md
@@ -1,4 +1,29 @@
|
|||||||
A nostr relay in C with sqlite on the back end.
|
A nostr relay in C with sqlite on the back end.
|
||||||
|
|
||||||
|
<!--
|
||||||
|
NOTE FOR ASSISTANTS: When updating the NIPs checklist below, ONLY change [ ] to [x] to mark as complete.
|
||||||
|
Do NOT modify the formatting, add emojis, or change the text. Keep the simple format consistent.
|
||||||
|
-->
|
||||||
|
|
||||||
|
|
||||||
|
### [NIPs](https://github.com/nostr-protocol/nips)
|
||||||
|
|
||||||
|
- [x] NIP-01: Basic protocol flow implementation
|
||||||
|
- [x] NIP-09: Event deletion
|
||||||
|
- [ ] NIP-11: Relay information document
|
||||||
|
- [ ] NIP-12: Generic tag queries
|
||||||
|
- [ ] NIP-13: Proof of Work
|
||||||
|
- [x] NIP-15: End of Stored Events Notice
|
||||||
|
- [ ] NIP-16: Event Treatment
|
||||||
|
- [x] NIP-20: Command Results
|
||||||
|
- [ ] NIP-22: Event `created_at` Limits
|
||||||
|
- [ ] NIP-25: Reactions
|
||||||
|
- [ ] NIP-26: Delegated Event Signing
|
||||||
|
- [ ] NIP-28: Public Chat
|
||||||
|
- [ ] NIP-33: Parameterized Replaceable Events
|
||||||
|
- [ ] NIP-40: Expiration Timestamp
|
||||||
|
- [ ] NIP-42: Authentication of clients to relays
|
||||||
|
- [ ] NIP-45: Counting results. [experimental](#count)
|
||||||
|
- [ ] NIP-50: Keywords filter. [experimental](#search)
|
||||||
|
- [ ] NIP-70: Protected Events
|
||||||
|
|
||||||
|
|||||||
389
build_and_push.sh
Executable file
389
build_and_push.sh
Executable file
@@ -0,0 +1,389 @@
|
|||||||
|
#!/bin/bash
# build_and_push.sh — compile C-Relay, bump the version tag, commit & push,
# and optionally cut a Gitea release with prebuilt binaries.
#
# Strict mode: exit on error, error on unset variables, fail pipelines on
# any stage failing (the original only had `set -e`).
set -euo pipefail

# ANSI color codes used by the logging helpers below (readonly constants).
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly NC='\033[0m'  # reset

# Logging helpers: one colored line per message.
# Info/success go to stdout; warnings/errors go to stderr so they survive
# output redirection.
print_status()  { echo -e "${BLUE}[INFO]${NC} $1"; }
print_success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; }
print_warning() { echo -e "${YELLOW}[WARNING]${NC} $1" >&2; }
print_error()   { echo -e "${RED}[ERROR]${NC} $1" >&2; }
|
||||||
|
|
||||||
|
# --- Command-line interface --------------------------------------------------

# Global variables
COMMIT_MESSAGE=""     # first non-flag argument; required
RELEASE_MODE=false    # true when -r/--release was given

# Print usage/help text.
# NOTE: defined BEFORE the argument-parsing loop below.  In bash a function
# must be defined before it is invoked; the original script called
# show_usage from the -h/--help branch while the definition only appeared
# later, so `-h` failed with "show_usage: command not found".
show_usage() {
  echo "C-Relay Build and Push Script"
  echo ""
  echo "Usage:"
  echo " $0 \"commit message\" - Default: compile, increment patch, commit & push"
  echo " $0 -r \"commit message\" - Release: compile x86+arm64, increment minor, create release"
  echo ""
  echo "Examples:"
  echo " $0 \"Fixed event validation bug\""
  echo " $0 --release \"Major release with new features\""
  echo ""
  echo "Default Mode (patch increment):"
  echo " - Compile C-Relay"
  echo " - Increment patch version (v1.2.3 → v1.2.4)"
  echo " - Git add, commit with message, and push"
  echo ""
  echo "Release Mode (-r flag):"
  echo " - Compile C-Relay for x86_64 and arm64"
  echo " - Increment minor version, zero patch (v1.2.3 → v1.3.0)"
  echo " - Git add, commit, push, and create Gitea release"
  echo ""
  echo "Requirements for Release Mode:"
  echo " - ARM64 cross-compiler: sudo apt install gcc-aarch64-linux-gnu"
  echo " - Gitea token in ~/.gitea_token for release uploads"
}

# Parse command line arguments
while [[ $# -gt 0 ]]; do
  case $1 in
    -r|--release)
      RELEASE_MODE=true
      shift
      ;;
    -h|--help)
      show_usage
      exit 0
      ;;
    *)
      # First non-flag argument is the commit message
      if [[ -z "$COMMIT_MESSAGE" ]]; then
        COMMIT_MESSAGE="$1"
      fi
      shift
      ;;
  esac
done

# Validate inputs: a commit message is mandatory in both modes.
if [[ -z "$COMMIT_MESSAGE" ]]; then
  print_error "Commit message is required"
  echo ""
  show_usage
  exit 1
fi
|
||||||
|
|
||||||
|
# Abort with an error unless the current directory is inside a git repository.
check_git_repo() {
  git rev-parse --git-dir > /dev/null 2>&1 && return 0
  print_error "Not in a git repository"
  exit 1
}
|
||||||
|
|
||||||
|
# Compute the next version number from existing git tags.
#
# Arguments:
#   $1 - increment type: "minor" (bump minor, reset patch) or anything else
#        for the default patch bump.
# Globals:
#   NEW_VERSION - exported with the computed next version, e.g. "v1.3.0"
increment_version() {
  local increment_type="$1"  # "patch" or "minor"

  print_status "Getting current version..."

  # Highest semver tag (not the chronologically latest).  Filter to strict
  # vMAJOR.MINOR.PATCH form: the glob alone also matches tags such as
  # v1.0.0-rc1, which the parser below would reject, aborting the script.
  LATEST_TAG=$(git tag -l 'v*.*.*' | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | sort -V | tail -n 1 || true)
  if [[ -z "$LATEST_TAG" ]]; then
    LATEST_TAG="v0.0.0"
    print_warning "No version tags found, starting from $LATEST_TAG"
  fi

  # Strip the 'v' prefix and split into numeric components.
  VERSION=${LATEST_TAG#v}
  if [[ $VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
    MAJOR=${BASH_REMATCH[1]}
    MINOR=${BASH_REMATCH[2]}
    PATCH=${BASH_REMATCH[3]}
  else
    print_error "Invalid version format in tag: $LATEST_TAG"
    print_error "Expected format: v0.1.0"
    exit 1
  fi

  if [[ "$increment_type" == "minor" ]]; then
    # Minor release: bump minor, reset patch to zero.
    NEW_VERSION="v${MAJOR}.$((MINOR + 1)).0"
    print_status "Release mode: incrementing minor version"
  else
    # Default: bump patch only.
    NEW_VERSION="v${MAJOR}.${MINOR}.$((PATCH + 1))"
    print_status "Default mode: incrementing patch version"
  fi

  print_status "Current version: $LATEST_TAG"
  print_status "New version: $NEW_VERSION"

  # Exported so the commit/release helpers invoked later can read it.
  export NEW_VERSION
}
|
||||||
|
|
||||||
|
# Build the default (host) binary via make; exits the script on failure.
compile_project() {
  print_status "Compiling C-Relay..."

  # Best-effort clean: a failure here only warrants a warning, because the
  # subsequent full build will fail loudly if something is actually wrong.
  if make clean > /dev/null 2>&1; then
    print_success "Cleaned previous build"
  else
    print_warning "Clean failed or no Makefile found"
  fi

  # The actual build — fatal if it fails.
  if make > /dev/null 2>&1; then
    print_success "C-Relay compiled successfully"
    return 0
  fi
  print_error "Compilation failed"
  exit 1
}
|
||||||
|
|
||||||
|
# Return 0 when the aarch64 cross-compiler is on PATH; otherwise print
# installation instructions and return 1.
check_cross_compiler() {
  command -v aarch64-linux-gnu-gcc > /dev/null 2>&1 && return 0
  print_error "ARM64/AArch64 cross-compiler not found!"
  print_error "Install with: sudo apt install gcc-aarch64-linux-gnu"
  return 1
}
|
||||||
|
|
||||||
|
# Build one architecture-specific release binary.
# Arguments:
#   $1 - compiler to pass as make CC=
#   $2 - output binary filename
#   $3 - human-readable architecture label used in messages
# Exits the script on build failure.
_build_one_binary() {
  local compiler="$1" output="$2" label="$3"
  print_status "Building $label version..."
  make clean > /dev/null 2>&1
  if ! make CC="$compiler" > /dev/null 2>&1; then
    print_error "$label build failed"
    exit 1
  fi
  if [[ ! -f "src/main" ]]; then
    print_error "$label binary not found after compilation"
    exit 1
  fi
  cp src/main "$output"
  print_success "$label binary created: $output"
}

# Build the x86_64 binary and, when a cross-compiler is available, the
# ARM64 binary; then restore a normal host build.
build_release_binaries() {
  print_status "Building release binaries..."

  _build_one_binary gcc c-relay-x86_64 x86_64

  if check_cross_compiler; then
    _build_one_binary aarch64-linux-gnu-gcc c-relay-arm64 ARM64
  else
    print_warning "ARM64 cross-compiler not available, skipping ARM64 build"
  fi

  # Restore the normal host build so the tree that gets committed contains
  # a host binary again.  Report a failure explicitly — under `set -e` the
  # original exited here with no message at all.
  make clean > /dev/null 2>&1
  if ! make > /dev/null 2>&1; then
    print_error "Failed to restore normal host build"
    exit 1
  fi
}
|
||||||
|
|
||||||
|
# Stage everything, commit it under "$NEW_VERSION - $COMMIT_MESSAGE",
# tag the new version, and push commits plus tags to the remote.
# Globals read: NEW_VERSION, COMMIT_MESSAGE
git_commit_and_push() {
  print_status "Preparing git commit..."

  # Stage all changes; staging failure is fatal.
  if ! git add . > /dev/null 2>&1; then
    print_error "Failed to stage changes"
    exit 1
  fi
  print_success "Staged all changes"

  # Commit only when the index actually differs from HEAD.
  if git diff --staged --quiet; then
    print_warning "No changes to commit"
  elif git commit -m "$NEW_VERSION - $COMMIT_MESSAGE" > /dev/null 2>&1; then
    print_success "Committed changes"
  else
    print_error "Failed to commit changes"
    exit 1
  fi

  # Tag the new version; an already-existing tag is only a warning.
  if git tag "$NEW_VERSION" > /dev/null 2>&1; then
    print_success "Created tag: $NEW_VERSION"
  else
    print_warning "Tag $NEW_VERSION already exists"
  fi

  print_status "Pushing to remote repository..."
  if ! git push > /dev/null 2>&1; then
    print_error "Failed to push changes"
    exit 1
  fi
  print_success "Pushed changes"

  # Tag-push failure is non-fatal (e.g. tag already on the remote).
  if git push --tags > /dev/null 2>&1; then
    print_success "Pushed tags"
  else
    print_warning "Failed to push tags"
  fi
}
|
||||||
|
|
||||||
|
# Create a Gitea release for $NEW_VERSION and attach the built binaries.
# Skipped (with a warning) when no access token is present.
# Globals read: NEW_VERSION, COMMIT_MESSAGE
create_gitea_release() {
  print_status "Creating Gitea release..."

  # A personal access token in ~/.gitea_token is required for the API calls.
  if [[ ! -f "$HOME/.gitea_token" ]]; then
    print_warning "No ~/.gitea_token found. Skipping release creation."
    print_warning "Create ~/.gitea_token with your Gitea access token to enable releases."
    return 0
  fi

  # Declaration separated from assignment so a read failure is not masked
  # by `local` (which always returns 0); direct redirection replaces the
  # former useless `cat | tr`.
  local token
  token=$(tr -d '\n\r' < "$HOME/.gitea_token")
  local api_url="https://git.laantungir.net/api/v1/repos/teknari/c-relay"

  # Create the release.
  # NOTE(review): COMMIT_MESSAGE is interpolated into the JSON payload
  # unescaped — a commit message containing double quotes or backslashes
  # will produce invalid JSON.  Consider escaping or using jq if available.
  print_status "Creating release $NEW_VERSION..."
  local response
  response=$(curl -s -X POST "$api_url/releases" \
    -H "Authorization: token $token" \
    -H "Content-Type: application/json" \
    -d "{\"tag_name\": \"$NEW_VERSION\", \"name\": \"$NEW_VERSION\", \"body\": \"$COMMIT_MESSAGE\"}")

  if echo "$response" | grep -q '"id"'; then
    print_success "Created release $NEW_VERSION"
    upload_release_binaries "$api_url" "$token"
  else
    # Creation can fail because the release already exists; try to attach
    # the binaries to the existing release anyway.
    print_warning "Release may already exist or creation failed"
    print_status "Attempting to upload to existing release..."
    upload_release_binaries "$api_url" "$token"
  fi
}
|
||||||
|
|
||||||
|
# Upload the built binaries as assets of the $NEW_VERSION release.
# Arguments:
#   $1 - repo API base URL
#   $2 - access token
# Globals read: NEW_VERSION
upload_release_binaries() {
  local api_url="$1"
  local token="$2"

  # Resolve the release ID for the tag.  Crude JSON scrape: the first
  # "id" field in the response is the release's own ID.  `|| true` keeps
  # an empty grep result from aborting the script under `set -e`.
  local release_id
  release_id=$(curl -s -H "Authorization: token $token" \
    "$api_url/releases/tags/$NEW_VERSION" | \
    grep -o '"id":[0-9]*' | head -n1 | cut -d: -f2 || true)

  if [[ -z "$release_id" ]]; then
    print_error "Could not get release ID for $NEW_VERSION"
    return 1
  fi

  _upload_one_asset "$api_url" "$token" "$release_id" \
    c-relay-x86_64 "c-relay-${NEW_VERSION}-linux-x86_64" "x86_64"
  _upload_one_asset "$api_url" "$token" "$release_id" \
    c-relay-arm64 "c-relay-${NEW_VERSION}-linux-arm64" "ARM64"
}

# Upload one file as a release asset; silently a no-op when the file is absent.
# Arguments:
#   $1 api_url, $2 token, $3 release_id, $4 local file, $5 asset name, $6 label
_upload_one_asset() {
  local api_url="$1" token="$2" release_id="$3" file="$4" asset_name="$5" label="$6"

  [[ -f "$file" ]] || return 0

  print_status "Uploading $label binary..."
  # -f (--fail) makes curl return non-zero on HTTP errors, so a rejected
  # upload is reported as a failure; the original's plain `curl -s`
  # printed a false success even on a 4xx/5xx response.
  if curl -sf -X POST "$api_url/releases/$release_id/assets" \
    -H "Authorization: token $token" \
    -F "attachment=@$file;filename=$asset_name" > /dev/null; then
    print_success "Uploaded $label binary"
  else
    print_warning "Failed to upload $label binary"
  fi
}
|
||||||
|
|
||||||
|
# Delete the architecture-specific binaries produced for a release, logging
# each one that was actually removed.
cleanup_release_binaries() {
  local file
  for file in c-relay-x86_64 c-relay-arm64; do
    [[ -f "$file" ]] || continue
    rm -f "$file"
    case "$file" in
      c-relay-x86_64) print_status "Cleaned up x86_64 binary" ;;
      c-relay-arm64)  print_status "Cleaned up ARM64 binary" ;;
    esac
  done
}
|
||||||
|
|
||||||
|
# Entry point: run either the release workflow (-r) or the default
# compile/bump/push workflow based on RELEASE_MODE.
main() {
  print_status "C-Relay Build and Push Script"

  # Prerequisite: must run from inside the repository.
  check_git_repo

  if [[ "$RELEASE_MODE" == true ]]; then
    print_status "=== RELEASE MODE ==="
    increment_version "minor"   # v1.2.3 -> v1.3.0
    compile_project
    build_release_binaries
    git_commit_and_push
    create_gitea_release        # attaches the freshly built binaries
    cleanup_release_binaries
    print_success "Release $NEW_VERSION completed successfully!"
    print_status "Binaries uploaded to Gitea release"
  else
    print_status "=== DEFAULT MODE ==="
    increment_version "patch"   # v1.2.3 -> v1.2.4
    compile_project
    git_commit_and_push
    print_success "Build and push completed successfully!"
    print_status "Version $NEW_VERSION pushed to repository"
  fi
}

# Execute main function
main "$@"
|
||||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -87,4 +87,95 @@ BEGIN
|
|||||||
AND kind = NEW.kind
|
AND kind = NEW.kind
|
||||||
AND event_type = 'replaceable'
|
AND event_type = 'replaceable'
|
||||||
AND id != NEW.id;
|
AND id != NEW.id;
|
||||||
END;
|
END;
|
||||||
|
|
||||||
|
-- Persistent Subscriptions Logging Tables (Phase 2)
-- Optional database logging for subscription analytics and debugging.

-- One row per subscription lifecycle event.
CREATE TABLE subscription_events (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    subscription_id TEXT NOT NULL,  -- Subscription ID from client
    client_ip TEXT NOT NULL,        -- Client IP address
    event_type TEXT NOT NULL CHECK (event_type IN ('created', 'closed', 'expired', 'disconnected')),
    filter_json TEXT,               -- JSON representation of filters (for 'created' rows)
    events_sent INTEGER DEFAULT 0,  -- Number of events sent to this subscription
    created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now')),
    ended_at INTEGER,               -- When subscription ended (closed/expired/disconnected)
    duration INTEGER                -- Filled in by trigger: ended_at - created_at
);
|
||||||
|
|
||||||
|
-- Subscription metrics summary: one roll-up row per calendar date.
CREATE TABLE subscription_metrics (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    date TEXT NOT NULL,                        -- Date (YYYY-MM-DD)
    total_created INTEGER DEFAULT 0,           -- Total subscriptions created
    total_closed INTEGER DEFAULT 0,            -- Total subscriptions closed
    total_events_broadcast INTEGER DEFAULT 0,  -- Total events broadcast
    avg_duration REAL DEFAULT 0,               -- Average subscription duration
    peak_concurrent INTEGER DEFAULT 0,         -- Peak concurrent subscriptions
    updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now')),
    UNIQUE(date)                               -- at most one row per day
);
|
||||||
|
|
||||||
|
-- Event broadcasting log (optional, for detailed analytics):
-- one row per (event, subscription) delivery.
CREATE TABLE event_broadcasts (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    event_id TEXT NOT NULL,         -- Event ID that was broadcast
    subscription_id TEXT NOT NULL,  -- Subscription that received it
    client_ip TEXT NOT NULL,        -- Client IP
    broadcast_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now')),
    FOREIGN KEY (event_id) REFERENCES events(id)
);
|
||||||
|
|
||||||
|
-- Indexes for subscription logging performance.
CREATE INDEX idx_subscription_events_id ON subscription_events(subscription_id);
CREATE INDEX idx_subscription_events_type ON subscription_events(event_type);
CREATE INDEX idx_subscription_events_created ON subscription_events(created_at DESC);
CREATE INDEX idx_subscription_events_client ON subscription_events(client_ip);

CREATE INDEX idx_subscription_metrics_date ON subscription_metrics(date DESC);

CREATE INDEX idx_event_broadcasts_event ON event_broadcasts(event_id);
CREATE INDEX idx_event_broadcasts_sub ON event_broadcasts(subscription_id);
CREATE INDEX idx_event_broadcasts_time ON event_broadcasts(broadcast_at DESC);
|
||||||
|
|
||||||
|
-- Compute `duration` the first time a subscription row gains an end time.
-- Fires only on the NULL -> non-NULL transition of ended_at, so a later
-- update cannot overwrite an already-computed duration.
CREATE TRIGGER update_subscription_duration
AFTER UPDATE OF ended_at ON subscription_events
WHEN NEW.ended_at IS NOT NULL AND OLD.ended_at IS NULL
BEGIN
    UPDATE subscription_events
    SET duration = NEW.ended_at - NEW.created_at
    WHERE id = NEW.id;
END;
|
||||||
|
|
||||||
|
-- Per-day subscription analytics derived from the raw event log.
CREATE VIEW subscription_analytics AS
SELECT
    date(created_at, 'unixepoch') as date,
    COUNT(*) as subscriptions_created,
    COUNT(CASE WHEN ended_at IS NOT NULL THEN 1 END) as subscriptions_ended,
    AVG(CASE WHEN duration IS NOT NULL THEN duration END) as avg_duration_seconds,
    MAX(events_sent) as max_events_sent,
    AVG(events_sent) as avg_events_sent,
    COUNT(DISTINCT client_ip) as unique_clients
FROM subscription_events
GROUP BY date(created_at, 'unixepoch')
ORDER BY date DESC;
|
||||||
|
|
||||||
|
-- Subscriptions that have a 'created' row but no terminating row yet
-- (log-derived view of "currently active").
-- NOTE(review): this matches on subscription_id alone — if a client reuses
-- a subscription ID after closing it, the new subscription is hidden by the
-- old terminating row.  Confirm IDs are unique per lifetime before relying
-- on this view.
CREATE VIEW active_subscriptions_log AS
SELECT
    subscription_id,
    client_ip,
    filter_json,
    events_sent,
    created_at,
    (strftime('%s', 'now') - created_at) as duration_seconds
FROM subscription_events
WHERE event_type = 'created'
  AND subscription_id NOT IN (
      SELECT subscription_id FROM subscription_events
      WHERE event_type IN ('closed', 'expired', 'disconnected')
  );
|
||||||
@@ -1,337 +0,0 @@
|
|||||||
# Advanced Nostr Relay Schema Design
|
|
||||||
|
|
||||||
## Overview
|
|
||||||
|
|
||||||
This document outlines the design for an advanced multi-table schema that enforces Nostr protocol compliance at the database level, with separate tables for different event types based on their storage and replacement characteristics.
|
|
||||||
|
|
||||||
## Event Type Classification
|
|
||||||
|
|
||||||
Based on the Nostr specification, events are classified into four categories:
|
|
||||||
|
|
||||||
### 1. Regular Events
|
|
||||||
- **Kinds**: `1000 <= n < 10000` || `4 <= n < 45` || `n == 1` || `n == 2`
|
|
||||||
- **Storage Policy**: All events stored permanently
|
|
||||||
- **Examples**: Text notes (1), Reposts (6), Reactions (7), Direct Messages (4)
|
|
||||||
|
|
||||||
### 2. Replaceable Events
|
|
||||||
- **Kinds**: `10000 <= n < 20000` || `n == 0` || `n == 3`
|
|
||||||
- **Storage Policy**: Only latest per `(pubkey, kind)` combination
|
|
||||||
- **Replacement Logic**: Latest `created_at`, then lowest `id` lexically
|
|
||||||
- **Examples**: Metadata (0), Contacts (3), Mute List (10000)
|
|
||||||
|
|
||||||
### 3. Ephemeral Events
|
|
||||||
- **Kinds**: `20000 <= n < 30000`
|
|
||||||
- **Storage Policy**: Not expected to be stored (optional temporary storage)
|
|
||||||
- **Examples**: Typing indicators, presence updates, ephemeral messages
|
|
||||||
|
|
||||||
### 4. Addressable Events
|
|
||||||
- **Kinds**: `30000 <= n < 40000`
|
|
||||||
- **Storage Policy**: Only latest per `(pubkey, kind, d_tag)` combination
|
|
||||||
- **Replacement Logic**: Same as replaceable events
|
|
||||||
- **Examples**: Long-form content (30023), Application-specific data
|
|
||||||
|
|
||||||
## SQLite JSON Capabilities Research
|
|
||||||
|
|
||||||
SQLite provides powerful JSON functions that could be leveraged for tag storage:
|
|
||||||
|
|
||||||
### Core JSON Functions
|
|
||||||
```sql
|
|
||||||
-- Extract specific values
|
|
||||||
json_extract(column, '$.path')
|
|
||||||
|
|
||||||
-- Iterate through arrays
|
|
||||||
json_each(json_array_column)
|
|
||||||
|
|
||||||
-- Flatten nested structures
|
|
||||||
json_tree(json_column)
|
|
||||||
|
|
||||||
-- Validate JSON structure
|
|
||||||
json_valid(column)
|
|
||||||
|
|
||||||
-- Array operations
|
|
||||||
json_array_length(column)
|
|
||||||
json_extract(column, '$[0]') -- First element
|
|
||||||
```
|
|
||||||
|
|
||||||
### Tag Query Examples
|
|
||||||
|
|
||||||
#### Find all 'e' tag references:
|
|
||||||
```sql
|
|
||||||
SELECT
|
|
||||||
id,
|
|
||||||
json_extract(value, '$[1]') as referenced_event_id,
|
|
||||||
json_extract(value, '$[2]') as relay_hint,
|
|
||||||
json_extract(value, '$[3]') as marker
|
|
||||||
FROM events, json_each(tags)
|
|
||||||
WHERE json_extract(value, '$[0]') = 'e';
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Find events with specific hashtags:
|
|
||||||
```sql
|
|
||||||
SELECT id, content
|
|
||||||
FROM events, json_each(tags)
|
|
||||||
WHERE json_extract(value, '$[0]') = 't'
|
|
||||||
AND json_extract(value, '$[1]') = 'bitcoin';
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Extract 'd' tag for addressable events:
|
|
||||||
```sql
|
|
||||||
SELECT
|
|
||||||
id,
|
|
||||||
json_extract(value, '$[1]') as d_tag_value
|
|
||||||
FROM events, json_each(tags)
|
|
||||||
WHERE json_extract(value, '$[0]') = 'd'
|
|
||||||
LIMIT 1;
|
|
||||||
```
|
|
||||||
|
|
||||||
### JSON Functional Indexes
|
|
||||||
```sql
|
|
||||||
-- Index on hashtags
|
|
||||||
CREATE INDEX idx_hashtags ON events(
|
|
||||||
json_extract(tags, '$[*][1]')
|
|
||||||
) WHERE json_extract(tags, '$[*][0]') = 't';
|
|
||||||
|
|
||||||
-- Index on 'd' tags for addressable events
|
|
||||||
CREATE INDEX idx_d_tags ON events_addressable(
|
|
||||||
json_extract(tags, '$[*][1]')
|
|
||||||
) WHERE json_extract(tags, '$[*][0]') = 'd';
|
|
||||||
```

> **Note:** SQLite's `json_extract` does not support the `$[*]` wildcard path syntax used in the index examples above — they are illustrative only. A working approach is to expand tags with `json_each` (as in the query examples) or to materialize tag values into a generated column or separate table and index that.
|
|
||||||
|
|
||||||
## Proposed Schema Design
|
|
||||||
|
|
||||||
### Option 1: Separate Tables with JSON Tags
|
|
||||||
|
|
||||||
```sql
|
|
||||||
-- Regular Events (permanent storage)
|
|
||||||
CREATE TABLE events_regular (
|
|
||||||
id TEXT PRIMARY KEY,
|
|
||||||
pubkey TEXT NOT NULL,
|
|
||||||
created_at INTEGER NOT NULL,
|
|
||||||
kind INTEGER NOT NULL,
|
|
||||||
content TEXT NOT NULL,
|
|
||||||
sig TEXT NOT NULL,
|
|
||||||
tags JSON,
|
|
||||||
first_seen INTEGER DEFAULT (strftime('%s', 'now')),
|
|
||||||
CONSTRAINT kind_regular CHECK (
|
|
||||||
(kind >= 1000 AND kind < 10000) OR
|
|
||||||
(kind >= 4 AND kind < 45) OR
|
|
||||||
kind = 1 OR kind = 2
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
-- Replaceable Events (latest per pubkey+kind)
|
|
||||||
CREATE TABLE events_replaceable (
|
|
||||||
pubkey TEXT NOT NULL,
|
|
||||||
kind INTEGER NOT NULL,
|
|
||||||
id TEXT NOT NULL,
|
|
||||||
created_at INTEGER NOT NULL,
|
|
||||||
content TEXT NOT NULL,
|
|
||||||
sig TEXT NOT NULL,
|
|
||||||
tags JSON,
|
|
||||||
replaced_at INTEGER DEFAULT (strftime('%s', 'now')),
|
|
||||||
PRIMARY KEY (pubkey, kind),
|
|
||||||
CONSTRAINT kind_replaceable CHECK (
|
|
||||||
(kind >= 10000 AND kind < 20000) OR
|
|
||||||
kind = 0 OR kind = 3
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
-- Ephemeral Events (temporary/optional storage)
|
|
||||||
CREATE TABLE events_ephemeral (
|
|
||||||
id TEXT PRIMARY KEY,
|
|
||||||
pubkey TEXT NOT NULL,
|
|
||||||
created_at INTEGER NOT NULL,
|
|
||||||
kind INTEGER NOT NULL,
|
|
||||||
content TEXT NOT NULL,
|
|
||||||
sig TEXT NOT NULL,
|
|
||||||
tags JSON,
|
|
||||||
expires_at INTEGER DEFAULT (strftime('%s', 'now', '+1 hour')),
|
|
||||||
CONSTRAINT kind_ephemeral CHECK (
|
|
||||||
kind >= 20000 AND kind < 30000
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
-- Addressable Events (latest per pubkey+kind+d_tag)
|
|
||||||
CREATE TABLE events_addressable (
|
|
||||||
pubkey TEXT NOT NULL,
|
|
||||||
kind INTEGER NOT NULL,
|
|
||||||
d_tag TEXT NOT NULL,
|
|
||||||
id TEXT NOT NULL,
|
|
||||||
created_at INTEGER NOT NULL,
|
|
||||||
content TEXT NOT NULL,
|
|
||||||
sig TEXT NOT NULL,
|
|
||||||
tags JSON,
|
|
||||||
replaced_at INTEGER DEFAULT (strftime('%s', 'now')),
|
|
||||||
PRIMARY KEY (pubkey, kind, d_tag),
|
|
||||||
CONSTRAINT kind_addressable CHECK (
|
|
||||||
kind >= 30000 AND kind < 40000
|
|
||||||
)
|
|
||||||
);
|
|
||||||
```
|
|
||||||
|
|
||||||
### Indexes for Performance
|
|
||||||
|
|
||||||
```sql
|
|
||||||
-- Regular events indexes
|
|
||||||
CREATE INDEX idx_regular_pubkey ON events_regular(pubkey);
|
|
||||||
CREATE INDEX idx_regular_kind ON events_regular(kind);
|
|
||||||
CREATE INDEX idx_regular_created_at ON events_regular(created_at);
|
|
||||||
CREATE INDEX idx_regular_kind_created_at ON events_regular(kind, created_at);
|
|
||||||
|
|
||||||
-- Replaceable events indexes
|
|
||||||
CREATE INDEX idx_replaceable_created_at ON events_replaceable(created_at);
|
|
||||||
CREATE INDEX idx_replaceable_id ON events_replaceable(id);
|
|
||||||
|
|
||||||
-- Ephemeral events indexes
|
|
||||||
CREATE INDEX idx_ephemeral_expires_at ON events_ephemeral(expires_at);
|
|
||||||
CREATE INDEX idx_ephemeral_pubkey ON events_ephemeral(pubkey);
|
|
||||||
|
|
||||||
-- Addressable events indexes
|
|
||||||
CREATE INDEX idx_addressable_created_at ON events_addressable(created_at);
|
|
||||||
CREATE INDEX idx_addressable_id ON events_addressable(id);
|
|
||||||
|
|
||||||
-- JSON tag indexes (examples)
|
|
||||||
CREATE INDEX idx_regular_e_tags ON events_regular(
|
|
||||||
json_extract(tags, '$[*][1]')
|
|
||||||
) WHERE json_extract(tags, '$[*][0]') = 'e';
|
|
||||||
|
|
||||||
CREATE INDEX idx_regular_p_tags ON events_regular(
|
|
||||||
json_extract(tags, '$[*][1]')
|
|
||||||
) WHERE json_extract(tags, '$[*][0]') = 'p';
|
|
||||||
```
|
|
||||||
|
|
||||||
### Option 2: Unified Tag Table Approach
|
|
||||||
|
|
||||||
```sql
|
|
||||||
-- Unified tag storage (alternative to JSON)
|
|
||||||
CREATE TABLE tags_unified (
|
|
||||||
event_id TEXT NOT NULL,
|
|
||||||
event_type TEXT NOT NULL, -- 'regular', 'replaceable', 'ephemeral', 'addressable'
|
|
||||||
tag_index INTEGER NOT NULL, -- Position in tag array
|
|
||||||
name TEXT NOT NULL,
|
|
||||||
value TEXT NOT NULL,
|
|
||||||
param_2 TEXT, -- Third element if present
|
|
||||||
param_3 TEXT, -- Fourth element if present
|
|
||||||
param_json TEXT, -- JSON for additional parameters
|
|
||||||
PRIMARY KEY (event_id, tag_index)
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE INDEX idx_tags_name_value ON tags_unified(name, value);
|
|
||||||
CREATE INDEX idx_tags_event_type ON tags_unified(event_type);
|
|
||||||
```
|
|
||||||
|
|
||||||
## Implementation Strategy
|
|
||||||
|
|
||||||
### 1. Kind Classification Function (C Code)
|
|
||||||
```c
|
|
||||||
/* Storage class of a Nostr event, derived from its kind number. */
typedef enum {
    EVENT_TYPE_REGULAR,
    EVENT_TYPE_REPLACEABLE,
    EVENT_TYPE_EPHEMERAL,
    EVENT_TYPE_ADDRESSABLE,
    EVENT_TYPE_INVALID
} event_type_t;

/*
 * Map an event kind to its storage classification.
 * Ranges per the document above: regular (1000..9999, 4..44, 1, 2),
 * replaceable (10000..19999, 0, 3), ephemeral (20000..29999),
 * addressable (30000..39999); any other kind is invalid.
 */
event_type_t classify_event_kind(int kind) {
    int is_regular = (kind >= 1000 && kind < 10000)
                  || (kind >= 4 && kind < 45)
                  || kind == 1
                  || kind == 2;
    if (is_regular) {
        return EVENT_TYPE_REGULAR;
    }

    int is_replaceable = (kind >= 10000 && kind < 20000)
                      || kind == 0
                      || kind == 3;
    if (is_replaceable) {
        return EVENT_TYPE_REPLACEABLE;
    }

    if (kind >= 20000 && kind < 30000) {
        return EVENT_TYPE_EPHEMERAL;
    }
    if (kind >= 30000 && kind < 40000) {
        return EVENT_TYPE_ADDRESSABLE;
    }

    return EVENT_TYPE_INVALID;
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. Replacement Logic for Replaceable Events
|
|
||||||
```sql
|
|
||||||
-- Trigger for replaceable events
|
|
||||||
CREATE TRIGGER replace_event_on_insert
|
|
||||||
BEFORE INSERT ON events_replaceable
|
|
||||||
FOR EACH ROW
|
|
||||||
WHEN EXISTS (
|
|
||||||
SELECT 1 FROM events_replaceable
|
|
||||||
WHERE pubkey = NEW.pubkey AND kind = NEW.kind
|
|
||||||
)
|
|
||||||
BEGIN
|
|
||||||
DELETE FROM events_replaceable
|
|
||||||
WHERE pubkey = NEW.pubkey
|
|
||||||
AND kind = NEW.kind
|
|
||||||
AND (
|
|
||||||
created_at < NEW.created_at OR
|
|
||||||
(created_at = NEW.created_at AND id > NEW.id)
|
|
||||||
);
|
|
||||||
END;
|
|
||||||
```
|
|
||||||
|
|
||||||
### 3. D-Tag Extraction for Addressable Events
|
|
||||||
```c
|
|
||||||
/*
 * Return a heap-allocated copy of the first 'd' tag value in a Nostr tag
 * array, or a copy of "" when no 'd' tag is present (addressable events
 * default to the empty d-tag).  The caller owns the returned string.
 * Returns NULL only when 'tags' is missing or not an array.
 * NOTE(review): the NULL return is inconsistent with the "" default —
 * confirm callers distinguish the two cases.
 */
char* extract_d_tag(cJSON* tags) {
    if (!tags || !cJSON_IsArray(tags)) {
        return NULL;
    }

    cJSON* tag = NULL;
    cJSON_ArrayForEach(tag, tags) {
        /* Each well-formed tag is an array of at least ["name", "value"]. */
        if (!cJSON_IsArray(tag) || cJSON_GetArraySize(tag) < 2) {
            continue;
        }

        cJSON* name = cJSON_GetArrayItem(tag, 0);
        cJSON* value = cJSON_GetArrayItem(tag, 1);
        if (cJSON_IsString(name) && cJSON_IsString(value) &&
            strcmp(cJSON_GetStringValue(name), "d") == 0) {
            return strdup(cJSON_GetStringValue(value));
        }
    }

    return strdup("");  /* Default empty d-tag */
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Advantages of This Design
|
|
||||||
|
|
||||||
### 1. Protocol Compliance
|
|
||||||
- **Enforced at DB level**: Schema constraints prevent invalid event storage
|
|
||||||
- **Automatic replacement**: Triggers handle replaceable/addressable event logic
|
|
||||||
- **Type safety**: Separate tables ensure correct handling per event type
|
|
||||||
|
|
||||||
### 2. Performance Benefits
|
|
||||||
- **Targeted indexes**: Each table optimized for its access patterns
|
|
||||||
- **Reduced storage**: Ephemeral events can be auto-expired
|
|
||||||
- **Query optimization**: SQLite can optimize queries per table structure
|
|
||||||
|
|
||||||
### 3. JSON Tag Benefits
|
|
||||||
- **Atomic storage**: Tags stored with their event
|
|
||||||
- **Rich querying**: SQLite JSON functions enable complex tag queries
|
|
||||||
- **Schema flexibility**: Can handle arbitrary tag structures
|
|
||||||
- **Functional indexes**: Index specific tag patterns efficiently
|
|
||||||
|
|
||||||
## Migration Strategy
|
|
||||||
|
|
||||||
1. **Phase 1**: Create new schema alongside existing
|
|
||||||
2. **Phase 2**: Implement kind classification and routing logic
|
|
||||||
3. **Phase 3**: Migrate existing data to appropriate tables
|
|
||||||
4. **Phase 4**: Update application logic to use new tables
|
|
||||||
5. **Phase 5**: Drop old schema after verification
|
|
||||||
|
|
||||||
## Next Steps for Implementation
|
|
||||||
|
|
||||||
1. **Prototype JSON performance**: Create test database with sample data
|
|
||||||
2. **Benchmark query patterns**: Compare JSON vs normalized approaches
|
|
||||||
3. **Implement kind classification**: Add routing logic to C code
|
|
||||||
4. **Create migration scripts**: Handle existing data transformation
|
|
||||||
5. **Update test suite**: Verify compliance with new schema
|
|
||||||
@@ -1,416 +0,0 @@
|
|||||||
# Final Schema Recommendation: Hybrid Single Table Approach
|
|
||||||
|
|
||||||
## Executive Summary
|
|
||||||
|
|
||||||
After analyzing the subscription query complexity, **the multi-table approach creates more problems than it solves**. REQ filters don't align with storage semantics - clients filter by kind, author, and tags regardless of event type classification.
|
|
||||||
|
|
||||||
**Recommendation: Modified Single Table with Event Type Classification**
|
|
||||||
|
|
||||||
## The Multi-Table Problem
|
|
||||||
|
|
||||||
### REQ Filter Reality Check
|
|
||||||
- Clients send: `{"kinds": [1, 0, 30023], "authors": ["pubkey"], "#p": ["target"]}`
|
|
||||||
- Multi-table requires: 3 separate queries + UNION + complex ordering
|
|
||||||
- Single table requires: 1 query with simple WHERE conditions
|
|
||||||
|
|
||||||
### Query Complexity Explosion
|
|
||||||
```sql
|
|
||||||
-- Multi-table nightmare for simple filter
|
|
||||||
WITH results AS (
|
|
||||||
SELECT * FROM events_regular WHERE kind = 1 AND pubkey = ?
|
|
||||||
UNION ALL
|
|
||||||
SELECT * FROM events_replaceable WHERE kind = 0 AND pubkey = ?
|
|
||||||
UNION ALL
|
|
||||||
SELECT * FROM events_addressable WHERE kind = 30023 AND pubkey = ?
|
|
||||||
)
|
|
||||||
SELECT r.* FROM results r
|
|
||||||
JOIN multiple_tag_tables t ON complex_conditions
|
|
||||||
ORDER BY created_at DESC, id ASC LIMIT ?;
|
|
||||||
|
|
||||||
-- vs Single table simplicity
|
|
||||||
SELECT e.* FROM events e, json_each(e.tags) t
|
|
||||||
WHERE e.kind IN (1, 0, 30023)
|
|
||||||
AND e.pubkey = ?
|
|
||||||
AND json_extract(t.value, '$[0]') = 'p'
|
|
||||||
AND json_extract(t.value, '$[1]') = ?
|
|
||||||
ORDER BY e.created_at DESC, e.id ASC LIMIT ?;
|
|
||||||
```
|
|
||||||
|
|
||||||
## Recommended Schema: Hybrid Approach
|
|
||||||
|
|
||||||
### Core Design Philosophy
|
|
||||||
- **Single table for REQ query simplicity**
|
|
||||||
- **Event type classification for protocol compliance**
|
|
||||||
- **JSON tags for atomic storage and rich querying**
|
|
||||||
- **Partial unique constraints for replacement logic**
|
|
||||||
|
|
||||||
### Schema Definition
|
|
||||||
|
|
||||||
```sql
|
|
||||||
CREATE TABLE events (
    id TEXT PRIMARY KEY,
    pubkey TEXT NOT NULL,
    created_at INTEGER NOT NULL,
    kind INTEGER NOT NULL,
    -- NIP-01 storage class; drives the replacement logic below.
    event_type TEXT NOT NULL CHECK (event_type IN ('regular', 'replaceable', 'ephemeral', 'addressable')),
    content TEXT NOT NULL,
    sig TEXT NOT NULL,
    tags JSON NOT NULL DEFAULT '[]',
    first_seen INTEGER NOT NULL DEFAULT (strftime('%s', 'now')),

    -- First value of the event's 'd' tag, used by addressable events.
    -- NOTE: SQLite generated columns may not contain subqueries, so a
    -- GENERATED ... AS (... FROM json_each(tags) ...) expression is invalid;
    -- the application extracts the 'd' tag and stores it here at insert
    -- time (NULL for non-addressable events).
    d_tag TEXT,

    -- Set when a newer replaceable/addressable event supersedes this row.
    replaced_at INTEGER
);

-- Protocol compliance: SQLite table constraints do not accept a WHERE
-- clause, so the NIP-01 replacement-uniqueness rules are expressed as
-- partial UNIQUE *indexes* instead of inline constraints.
CREATE UNIQUE INDEX idx_unique_replaceable
    ON events(pubkey, kind)
    WHERE event_type = 'replaceable';

CREATE UNIQUE INDEX idx_unique_addressable
    ON events(pubkey, kind, d_tag)
    WHERE event_type = 'addressable' AND d_tag IS NOT NULL;
|
|
||||||
```
|
|
||||||
|
|
||||||
### Event Type Classification Function
|
|
||||||
|
|
||||||
```sql
|
|
||||||
-- Function to determine event type from kind
|
|
||||||
CREATE VIEW event_type_lookup AS
|
|
||||||
SELECT
|
|
||||||
CASE
|
|
||||||
WHEN (kind >= 1000 AND kind < 10000) OR
|
|
||||||
(kind >= 4 AND kind < 45) OR
|
|
||||||
kind = 1 OR kind = 2 THEN 'regular'
|
|
||||||
WHEN (kind >= 10000 AND kind < 20000) OR
|
|
||||||
kind = 0 OR kind = 3 THEN 'replaceable'
|
|
||||||
WHEN kind >= 20000 AND kind < 30000 THEN 'ephemeral'
|
|
||||||
WHEN kind >= 30000 AND kind < 40000 THEN 'addressable'
|
|
||||||
ELSE 'unknown'
|
|
||||||
END as event_type,
|
|
||||||
kind
|
|
||||||
FROM (
|
|
||||||
-- Generate all possible kind values for lookup
|
|
||||||
WITH RECURSIVE kinds(kind) AS (
|
|
||||||
SELECT 0
|
|
||||||
UNION ALL
|
|
||||||
SELECT kind + 1 FROM kinds WHERE kind < 65535
|
|
||||||
)
|
|
||||||
SELECT kind FROM kinds
|
|
||||||
);
|
|
||||||
```
|
|
||||||
|
|
||||||
### Performance Indexes
|
|
||||||
|
|
||||||
```sql
|
|
||||||
-- Core query patterns
|
|
||||||
CREATE INDEX idx_events_pubkey ON events(pubkey);
|
|
||||||
CREATE INDEX idx_events_kind ON events(kind);
|
|
||||||
CREATE INDEX idx_events_created_at ON events(created_at DESC);
|
|
||||||
CREATE INDEX idx_events_event_type ON events(event_type);
|
|
||||||
|
|
||||||
-- Composite indexes for common filters
|
|
||||||
CREATE INDEX idx_events_pubkey_created_at ON events(pubkey, created_at DESC);
|
|
||||||
CREATE INDEX idx_events_kind_created_at ON events(kind, created_at DESC);
|
|
||||||
CREATE INDEX idx_events_type_created_at ON events(event_type, created_at DESC);
|
|
||||||
|
|
||||||
-- JSON tag indexes for common patterns
|
|
||||||
CREATE INDEX idx_events_e_tags ON events(
|
|
||||||
json_extract(tags, '$[*][1]')
|
|
||||||
) WHERE json_extract(tags, '$[*][0]') = 'e';
|
|
||||||
|
|
||||||
CREATE INDEX idx_events_p_tags ON events(
|
|
||||||
json_extract(tags, '$[*][1]')
|
|
||||||
) WHERE json_extract(tags, '$[*][0]') = 'p';
|
|
||||||
|
|
||||||
CREATE INDEX idx_events_hashtags ON events(
|
|
||||||
json_extract(tags, '$[*][1]')
|
|
||||||
) WHERE json_extract(tags, '$[*][0]') = 't';
|
|
||||||
|
|
||||||
-- Addressable events d_tag index
|
|
||||||
CREATE INDEX idx_events_d_tag ON events(d_tag)
|
|
||||||
WHERE event_type = 'addressable' AND d_tag IS NOT NULL;
|
|
||||||
```
|
|
||||||
|
|
||||||
### Replacement Logic Implementation
|
|
||||||
|
|
||||||
#### Replaceable Events Trigger
|
|
||||||
```sql
|
|
||||||
CREATE TRIGGER handle_replaceable_events
|
|
||||||
BEFORE INSERT ON events
|
|
||||||
FOR EACH ROW
|
|
||||||
WHEN NEW.event_type = 'replaceable'
|
|
||||||
BEGIN
|
|
||||||
-- Delete older replaceable events with same pubkey+kind
|
|
||||||
DELETE FROM events
|
|
||||||
WHERE event_type = 'replaceable'
|
|
||||||
AND pubkey = NEW.pubkey
|
|
||||||
AND kind = NEW.kind
|
|
||||||
AND (
|
|
||||||
created_at < NEW.created_at OR
|
|
||||||
(created_at = NEW.created_at AND id > NEW.id)
|
|
||||||
);
|
|
||||||
END;
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Addressable Events Trigger
|
|
||||||
```sql
|
|
||||||
CREATE TRIGGER handle_addressable_events
|
|
||||||
BEFORE INSERT ON events
|
|
||||||
FOR EACH ROW
|
|
||||||
WHEN NEW.event_type = 'addressable'
|
|
||||||
BEGIN
|
|
||||||
-- Delete older addressable events with same pubkey+kind+d_tag
|
|
||||||
DELETE FROM events
|
|
||||||
WHERE event_type = 'addressable'
|
|
||||||
AND pubkey = NEW.pubkey
|
|
||||||
AND kind = NEW.kind
|
|
||||||
AND d_tag = NEW.d_tag
|
|
||||||
AND (
|
|
||||||
created_at < NEW.created_at OR
|
|
||||||
(created_at = NEW.created_at AND id > NEW.id)
|
|
||||||
);
|
|
||||||
END;
|
|
||||||
```
|
|
||||||
|
|
||||||
## Implementation Strategy
|
|
||||||
|
|
||||||
### C Code Integration
|
|
||||||
|
|
||||||
#### Event Type Classification
|
|
||||||
```c
|
|
||||||
/*
 * NIP-01 storage classes for Nostr event kinds.
 */
typedef enum {
    EVENT_TYPE_REGULAR,
    EVENT_TYPE_REPLACEABLE,
    EVENT_TYPE_EPHEMERAL,
    EVENT_TYPE_ADDRESSABLE,
    EVENT_TYPE_UNKNOWN
} event_type_t;

/*
 * Map a kind number to its NIP-01 storage class.
 *
 * Regular:     1, 2, 4..44, 1000..9999
 * Replaceable: 0, 3, 10000..19999
 * Ephemeral:   20000..29999
 * Addressable: 30000..39999
 * Anything else is EVENT_TYPE_UNKNOWN.
 */
event_type_t classify_event_kind(int kind) {
    /* Fixed low kinds first, then the numbered ranges. */
    if (kind == 0 || kind == 3) {
        return EVENT_TYPE_REPLACEABLE;
    }
    if (kind == 1 || kind == 2 || (kind >= 4 && kind < 45)) {
        return EVENT_TYPE_REGULAR;
    }
    if (kind >= 1000 && kind < 10000) {
        return EVENT_TYPE_REGULAR;
    }
    if (kind >= 10000 && kind < 20000) {
        return EVENT_TYPE_REPLACEABLE;
    }
    if (kind >= 20000 && kind < 30000) {
        return EVENT_TYPE_EPHEMERAL;
    }
    if (kind >= 30000 && kind < 40000) {
        return EVENT_TYPE_ADDRESSABLE;
    }
    return EVENT_TYPE_UNKNOWN;
}

/*
 * Return the schema's event_type column value for a storage class.
 * Falls back to "unknown" for out-of-range values.
 */
const char* event_type_to_string(event_type_t type) {
    static const char* const names[] = {
        "regular", "replaceable", "ephemeral", "addressable"
    };
    if (type >= EVENT_TYPE_REGULAR && type <= EVENT_TYPE_ADDRESSABLE) {
        return names[type];
    }
    return "unknown";
}
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Simplified Event Storage
|
|
||||||
```c
|
|
||||||
/*
 * Store a Nostr event in the unified events table.
 *
 * The database triggers handle replaceable/addressable replacement, so a
 * single INSERT suffices. Returns 0 on success, -1 on failure.
 */
int store_event(cJSON* event) {
    if (!event) {
        return -1;
    }

    // Extract required fields
    cJSON* id = cJSON_GetObjectItem(event, "id");
    cJSON* pubkey = cJSON_GetObjectItem(event, "pubkey");
    cJSON* created_at = cJSON_GetObjectItem(event, "created_at");
    cJSON* kind = cJSON_GetObjectItem(event, "kind");
    cJSON* content = cJSON_GetObjectItem(event, "content");
    cJSON* sig = cJSON_GetObjectItem(event, "sig");

    // Reject malformed events up front; the original deferenced these
    // blindly, which binds NULL/NaN and violates the NOT NULL schema.
    if (!id || !pubkey || !created_at || !kind || !content || !sig) {
        return -1;
    }

    // Classify event type
    event_type_t type = classify_event_kind((int)cJSON_GetNumberValue(kind));

    // Serialize tags compactly. Use a literal "[]" fallback: the original
    // passed cJSON_CreateArray() inline and leaked it, and cJSON_Print
    // pretty-prints (bloating the stored JSON with whitespace).
    cJSON* tags = cJSON_GetObjectItem(event, "tags");
    char* tags_json = (tags && cJSON_IsArray(tags))
        ? cJSON_PrintUnformatted(tags)
        : strdup("[]");
    if (!tags_json) {
        return -1;
    }

    // Single INSERT statement - database handles replacement via triggers
    const char* sql =
        "INSERT INTO events (id, pubkey, created_at, kind, event_type, content, sig, tags) "
        "VALUES (?, ?, ?, ?, ?, ?, ?, ?)";

    sqlite3_stmt* stmt;
    int rc = sqlite3_prepare_v2(g_db, sql, -1, &stmt, NULL);
    if (rc != SQLITE_OK) {
        free(tags_json);
        return -1;
    }

    sqlite3_bind_text(stmt, 1, cJSON_GetStringValue(id), -1, SQLITE_STATIC);
    sqlite3_bind_text(stmt, 2, cJSON_GetStringValue(pubkey), -1, SQLITE_STATIC);
    sqlite3_bind_int64(stmt, 3, (sqlite3_int64)cJSON_GetNumberValue(created_at));
    sqlite3_bind_int(stmt, 4, (int)cJSON_GetNumberValue(kind));
    sqlite3_bind_text(stmt, 5, event_type_to_string(type), -1, SQLITE_STATIC);
    sqlite3_bind_text(stmt, 6, cJSON_GetStringValue(content), -1, SQLITE_STATIC);
    sqlite3_bind_text(stmt, 7, cJSON_GetStringValue(sig), -1, SQLITE_STATIC);
    // SQLITE_TRANSIENT: sqlite copies tags_json, so freeing it below is safe.
    sqlite3_bind_text(stmt, 8, tags_json, -1, SQLITE_TRANSIENT);

    rc = sqlite3_step(stmt);
    sqlite3_finalize(stmt);
    free(tags_json);

    return (rc == SQLITE_DONE) ? 0 : -1;
}
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Simple REQ Query Building
|
|
||||||
```c
|
|
||||||
/*
 * Build the SQL skeleton for a single REQ filter against the unified
 * events table. Parameter placeholders for list values are appended by
 * the caller; this function only assembles the query structure.
 *
 * Returns a heap-allocated string owned by the caller (g_free).
 */
char* build_filter_query(cJSON* filter) {
    // Build single query against events table
    // Much simpler than multi-table approach
    GString* query = g_string_new("SELECT * FROM events WHERE 1=1");

    // Handle ids filter
    cJSON* ids = cJSON_GetObjectItem(filter, "ids");
    if (ids && cJSON_IsArray(ids)) {
        g_string_append(query, " AND id IN (");
        // Add parameter placeholders
        g_string_append(query, ")");
    }

    // Handle authors filter
    cJSON* authors = cJSON_GetObjectItem(filter, "authors");
    if (authors && cJSON_IsArray(authors)) {
        g_string_append(query, " AND pubkey IN (");
        // Add parameter placeholders
        g_string_append(query, ")");
    }

    // Handle kinds filter
    cJSON* kinds = cJSON_GetObjectItem(filter, "kinds");
    if (kinds && cJSON_IsArray(kinds)) {
        g_string_append(query, " AND kind IN (");
        // Add parameter placeholders
        g_string_append(query, ")");
    }

    // Handle tag filters (#e, #p, etc.). The tag letter comes from a
    // client-controlled filter key and is interpolated directly into the
    // SQL via %c, so it MUST be validated: a key such as "#'" would
    // otherwise inject a quote into the statement.
    cJSON* item;
    cJSON_ArrayForEach(item, filter) {
        char* key = item->string;
        if (key && key[0] == '#' && strlen(key) == 2) {
            char tag_name = key[1];
            // NIP-12 tag filters are single ASCII letters only.
            if ((tag_name >= 'a' && tag_name <= 'z') ||
                (tag_name >= 'A' && tag_name <= 'Z')) {
                g_string_append_printf(query,
                    " AND EXISTS (SELECT 1 FROM json_each(tags) "
                    "WHERE json_extract(value, '$[0]') = '%c' "
                    "AND json_extract(value, '$[1]') IN (", tag_name);
                // Add parameter placeholders
                g_string_append(query, "))");
            }
        }
    }

    // Handle time range
    cJSON* since = cJSON_GetObjectItem(filter, "since");
    if (since) {
        g_string_append(query, " AND created_at >= ?");
    }

    cJSON* until = cJSON_GetObjectItem(filter, "until");
    if (until) {
        g_string_append(query, " AND created_at <= ?");
    }

    // NIP-01 ordering: newest first, event id ascending as tiebreaker.
    g_string_append(query, " ORDER BY created_at DESC, id ASC");

    cJSON* limit = cJSON_GetObjectItem(filter, "limit");
    if (limit) {
        g_string_append(query, " LIMIT ?");
    }

    // FALSE: keep the character buffer and hand it to the caller.
    return g_string_free(query, FALSE);
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Benefits of This Approach
|
|
||||||
|
|
||||||
### 1. Query Simplicity
|
|
||||||
- ✅ Single table = simple REQ queries
|
|
||||||
- ✅ No UNION complexity
|
|
||||||
- ✅ Familiar SQL patterns
|
|
||||||
- ✅ Easy LIMIT and ORDER BY handling
|
|
||||||
|
|
||||||
### 2. Protocol Compliance
|
|
||||||
- ✅ Event type classification enforced
|
|
||||||
- ✅ Replacement logic via triggers
|
|
||||||
- ✅ Unique constraints prevent duplicates
|
|
||||||
- ✅ Proper handling of all event types
|
|
||||||
|
|
||||||
### 3. Performance
|
|
||||||
- ✅ Unified indexes across all events
|
|
||||||
- ✅ No join overhead for basic queries
|
|
||||||
- ✅ JSON tag indexes for complex filters
|
|
||||||
- ✅ Single table scan for cross-kind queries
|
|
||||||
|
|
||||||
### 4. Implementation Simplicity
|
|
||||||
- ✅ Minimal changes from current code
|
|
||||||
- ✅ Database handles replacement logic
|
|
||||||
- ✅ Simple event storage function
|
|
||||||
- ✅ No complex routing logic needed
|
|
||||||
|
|
||||||
### 5. Future Flexibility
|
|
||||||
- ✅ Can add columns for new event types
|
|
||||||
- ✅ Can split tables later if needed
|
|
||||||
- ✅ Easy to add new indexes
|
|
||||||
- ✅ Extensible constraint system
|
|
||||||
|
|
||||||
## Migration Path
|
|
||||||
|
|
||||||
### Phase 1: Schema Update
|
|
||||||
1. Add `event_type` column to existing events table
|
|
||||||
2. Add JSON `tags` column
|
|
||||||
3. Create classification triggers
|
|
||||||
4. Add partial unique constraints
|
|
||||||
|
|
||||||
### Phase 2: Data Migration
|
|
||||||
1. Classify existing events by kind
|
|
||||||
2. Convert existing tag table data to JSON
|
|
||||||
3. Verify constraint compliance
|
|
||||||
4. Update indexes
|
|
||||||
|
|
||||||
### Phase 3: Code Updates
|
|
||||||
1. Update event storage to use new schema
|
|
||||||
2. Simplify REQ query building
|
|
||||||
3. Remove tag table JOIN logic
|
|
||||||
4. Test subscription filtering
|
|
||||||
|
|
||||||
### Phase 4: Optimization
|
|
||||||
1. Monitor query performance
|
|
||||||
2. Add specialized indexes as needed
|
|
||||||
3. Tune replacement triggers
|
|
||||||
4. Consider ephemeral event cleanup
|
|
||||||
|
|
||||||
## Conclusion
|
|
||||||
|
|
||||||
This hybrid approach achieves the best of both worlds:
|
|
||||||
|
|
||||||
- **Protocol compliance** through event type classification and constraints
|
|
||||||
- **Query simplicity** through unified storage
|
|
||||||
- **Performance** through targeted indexes
|
|
||||||
- **Implementation ease** through minimal complexity
|
|
||||||
|
|
||||||
The multi-table approach, while theoretically cleaner, creates a subscription query nightmare that would significantly burden the implementation. The hybrid single-table approach provides all the benefits with manageable complexity.
|
|
||||||
@@ -1,326 +0,0 @@
|
|||||||
# Implementation Plan: Hybrid Schema Migration
|
|
||||||
|
|
||||||
## Overview
|
|
||||||
|
|
||||||
Migrating from the current two-table design (event + tag tables) to a single event table with JSON tags column and event type classification.
|
|
||||||
|
|
||||||
## Current Schema → Target Schema
|
|
||||||
|
|
||||||
### Current Schema (to be replaced)
|
|
||||||
```sql
|
|
||||||
CREATE TABLE event (
|
|
||||||
id TEXT PRIMARY KEY,
|
|
||||||
pubkey TEXT NOT NULL,
|
|
||||||
created_at INTEGER NOT NULL,
|
|
||||||
kind INTEGER NOT NULL,
|
|
||||||
content TEXT NOT NULL,
|
|
||||||
sig TEXT NOT NULL
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE TABLE tag (
|
|
||||||
id TEXT NOT NULL, -- references event.id
|
|
||||||
name TEXT NOT NULL,
|
|
||||||
value TEXT NOT NULL,
|
|
||||||
parameters TEXT
|
|
||||||
);
|
|
||||||
```
|
|
||||||
|
|
||||||
### Target Schema (simplified from final recommendation)
|
|
||||||
```sql
|
|
||||||
CREATE TABLE events (
    id TEXT PRIMARY KEY,
    pubkey TEXT NOT NULL,
    created_at INTEGER NOT NULL,
    kind INTEGER NOT NULL,
    event_type TEXT NOT NULL CHECK (event_type IN ('regular', 'replaceable', 'ephemeral', 'addressable')),
    content TEXT NOT NULL,
    sig TEXT NOT NULL,
    tags JSON NOT NULL DEFAULT '[]',
    first_seen INTEGER NOT NULL DEFAULT (strftime('%s', 'now')),

    -- First 'd'-tag value for addressable events, populated by the
    -- application at insert time (NULL otherwise). SQLite's json_extract
    -- does not support JSONPath filter syntax like $[?(@[0]=="d")], so
    -- this cannot be computed inside a constraint.
    d_tag TEXT
);

-- Optional: protocol compliance (can be added later). SQLite table
-- constraints do not accept a WHERE clause, so the replacement-uniqueness
-- rules are expressed as partial UNIQUE indexes:
CREATE UNIQUE INDEX idx_unique_replaceable
    ON events(pubkey, kind)
    WHERE event_type = 'replaceable';

CREATE UNIQUE INDEX idx_unique_addressable
    ON events(pubkey, kind, d_tag)
    WHERE event_type = 'addressable' AND d_tag IS NOT NULL;
|
|
||||||
```
|
|
||||||
|
|
||||||
## Implementation Steps
|
|
||||||
|
|
||||||
### Phase 1: Update Schema File
|
|
||||||
|
|
||||||
**File**: `db/schema.sql`
|
|
||||||
|
|
||||||
1. Replace current event table definition
|
|
||||||
2. Remove tag table completely
|
|
||||||
3. Add new indexes for performance
|
|
||||||
4. Add event type classification logic
|
|
||||||
|
|
||||||
### Phase 2: Update C Code
|
|
||||||
|
|
||||||
**File**: `src/main.c`
|
|
||||||
|
|
||||||
1. Add event type classification function
|
|
||||||
2. Update `store_event()` function to use JSON tags
|
|
||||||
3. Update `retrieve_event()` function to return JSON tags
|
|
||||||
4. Remove all tag table related code
|
|
||||||
5. Update REQ query handling to use JSON tag queries
|
|
||||||
|
|
||||||
### Phase 3: Update Database Initialization
|
|
||||||
|
|
||||||
**File**: `db/init.sh`
|
|
||||||
|
|
||||||
1. Update table count validation (expect 1 table instead of 2)
|
|
||||||
2. Update schema verification logic
|
|
||||||
|
|
||||||
### Phase 4: Update Tests
|
|
||||||
|
|
||||||
**File**: `tests/1_nip_test.sh`
|
|
||||||
|
|
||||||
1. Verify events are stored with JSON tags
|
|
||||||
2. Test query functionality with new schema
|
|
||||||
3. Validate event type classification
|
|
||||||
|
|
||||||
### Phase 5: Migration Strategy
|
|
||||||
|
|
||||||
Create migration script to handle existing data (if any).
|
|
||||||
|
|
||||||
## Detailed Implementation
|
|
||||||
|
|
||||||
### 1. Event Type Classification
|
|
||||||
|
|
||||||
```c
|
|
||||||
// Add to src/main.c
|
|
||||||
// NIP-01 storage classes for Nostr event kinds.
typedef enum {
    EVENT_TYPE_REGULAR,
    EVENT_TYPE_REPLACEABLE,
    EVENT_TYPE_EPHEMERAL,
    EVENT_TYPE_ADDRESSABLE,
    EVENT_TYPE_UNKNOWN
} event_type_t;

// Classify a kind number per NIP-01:
//   regular:     1, 2, 4..44, 1000..9999
//   replaceable: 0, 3, 10000..19999
//   ephemeral:   20000..29999
//   addressable: 30000..39999
event_type_t classify_event_kind(int kind) {
    int is_regular = (kind >= 1000 && kind < 10000)
                  || (kind >= 4 && kind < 45)
                  || kind == 1
                  || kind == 2;
    if (is_regular)
        return EVENT_TYPE_REGULAR;

    int is_replaceable = (kind >= 10000 && kind < 20000)
                      || kind == 0
                      || kind == 3;
    if (is_replaceable)
        return EVENT_TYPE_REPLACEABLE;

    if (kind >= 20000 && kind < 30000)
        return EVENT_TYPE_EPHEMERAL;

    if (kind >= 30000 && kind < 40000)
        return EVENT_TYPE_ADDRESSABLE;

    return EVENT_TYPE_UNKNOWN;
}

// Textual event_type column value for a storage class ("unknown" fallback).
const char* event_type_to_string(event_type_t type) {
    if (type == EVENT_TYPE_REGULAR)     return "regular";
    if (type == EVENT_TYPE_REPLACEABLE) return "replaceable";
    if (type == EVENT_TYPE_EPHEMERAL)   return "ephemeral";
    if (type == EVENT_TYPE_ADDRESSABLE) return "addressable";
    return "unknown";
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. Updated store_event Function
|
|
||||||
|
|
||||||
```c
|
|
||||||
// Replace existing store_event function
|
|
||||||
/*
 * Store a validated Nostr event in the unified events table.
 *
 * Replacement of replaceable/addressable events is handled by database
 * triggers; duplicates (SQLITE_CONSTRAINT) are treated as success.
 * Returns 0 on success or duplicate, -1 on error.
 */
int store_event(cJSON* event) {
    if (!g_db || !event) {
        return -1;
    }

    // Extract event fields
    cJSON* id = cJSON_GetObjectItem(event, "id");
    cJSON* pubkey = cJSON_GetObjectItem(event, "pubkey");
    cJSON* created_at = cJSON_GetObjectItem(event, "created_at");
    cJSON* kind = cJSON_GetObjectItem(event, "kind");
    cJSON* content = cJSON_GetObjectItem(event, "content");
    cJSON* sig = cJSON_GetObjectItem(event, "sig");
    cJSON* tags = cJSON_GetObjectItem(event, "tags");

    if (!id || !pubkey || !created_at || !kind || !content || !sig) {
        log_error("Invalid event - missing required fields");
        return -1;
    }

    // Classify event type
    event_type_t type = classify_event_kind((int)cJSON_GetNumberValue(kind));

    // Serialize tags to JSON (empty array if no tags). Use the
    // unformatted printer: cJSON_Print pretty-prints with newlines and
    // indentation, needlessly bloating every stored row.
    char* tags_json = (tags && cJSON_IsArray(tags))
        ? cJSON_PrintUnformatted(tags)
        : strdup("[]");

    if (!tags_json) {
        log_error("Failed to serialize tags to JSON");
        return -1;
    }

    // Single INSERT statement
    const char* sql =
        "INSERT INTO events (id, pubkey, created_at, kind, event_type, content, sig, tags) "
        "VALUES (?, ?, ?, ?, ?, ?, ?, ?)";

    sqlite3_stmt* stmt;
    int rc = sqlite3_prepare_v2(g_db, sql, -1, &stmt, NULL);
    if (rc != SQLITE_OK) {
        log_error("Failed to prepare event insert statement");
        free(tags_json);
        return -1;
    }

    // Bind parameters
    sqlite3_bind_text(stmt, 1, cJSON_GetStringValue(id), -1, SQLITE_STATIC);
    sqlite3_bind_text(stmt, 2, cJSON_GetStringValue(pubkey), -1, SQLITE_STATIC);
    sqlite3_bind_int64(stmt, 3, (sqlite3_int64)cJSON_GetNumberValue(created_at));
    sqlite3_bind_int(stmt, 4, (int)cJSON_GetNumberValue(kind));
    sqlite3_bind_text(stmt, 5, event_type_to_string(type), -1, SQLITE_STATIC);
    sqlite3_bind_text(stmt, 6, cJSON_GetStringValue(content), -1, SQLITE_STATIC);
    sqlite3_bind_text(stmt, 7, cJSON_GetStringValue(sig), -1, SQLITE_STATIC);
    // SQLITE_TRANSIENT: sqlite copies the buffer, so one free below suffices.
    sqlite3_bind_text(stmt, 8, tags_json, -1, SQLITE_TRANSIENT);

    // Execute and release in one place (the original freed tags_json in
    // four separate branches, which is easy to get wrong when editing).
    rc = sqlite3_step(stmt);
    sqlite3_finalize(stmt);
    free(tags_json);

    if (rc == SQLITE_DONE) {
        log_success("Event stored in database");
        return 0;
    }

    if (rc == SQLITE_CONSTRAINT) {
        log_warning("Event already exists in database");
        return 0; // Not an error, just duplicate
    }

    char error_msg[256];
    snprintf(error_msg, sizeof(error_msg), "Failed to insert event: %s", sqlite3_errmsg(g_db));
    log_error(error_msg);
    return -1;
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### 3. Updated retrieve_event Function
|
|
||||||
|
|
||||||
```c
|
|
||||||
// Replace existing retrieve_event function
|
|
||||||
cJSON* retrieve_event(const char* event_id) {
|
|
||||||
if (!g_db || !event_id) {
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
|
|
||||||
const char* sql =
|
|
||||||
"SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE id = ?";
|
|
||||||
|
|
||||||
sqlite3_stmt* stmt;
|
|
||||||
int rc = sqlite3_prepare_v2(g_db, sql, -1, &stmt, NULL);
|
|
||||||
if (rc != SQLITE_OK) {
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
|
|
||||||
sqlite3_bind_text(stmt, 1, event_id, -1, SQLITE_STATIC);
|
|
||||||
|
|
||||||
cJSON* event = NULL;
|
|
||||||
if (sqlite3_step(stmt) == SQLITE_ROW) {
|
|
||||||
event = cJSON_CreateObject();
|
|
||||||
|
|
||||||
cJSON_AddStringToObject(event, "id", (char*)sqlite3_column_text(stmt, 0));
|
|
||||||
cJSON_AddStringToObject(event, "pubkey", (char*)sqlite3_column_text(stmt, 1));
|
|
||||||
cJSON_AddNumberToObject(event, "created_at", sqlite3_column_int64(stmt, 2));
|
|
||||||
cJSON_AddNumberToObject(event, "kind", sqlite3_column_int(stmt, 3));
|
|
||||||
cJSON_AddStringToObject(event, "content", (char*)sqlite3_column_text(stmt, 4));
|
|
||||||
cJSON_AddStringToObject(event, "sig", (char*)sqlite3_column_text(stmt, 5));
|
|
||||||
|
|
||||||
// Parse tags JSON
|
|
||||||
const char* tags_json = (char*)sqlite3_column_text(stmt, 6);
|
|
||||||
if (tags_json) {
|
|
||||||
cJSON* tags = cJSON_Parse(tags_json);
|
|
||||||
if (tags) {
|
|
||||||
cJSON_AddItemToObject(event, "tags", tags);
|
|
||||||
} else {
|
|
||||||
cJSON_AddItemToObject(event, "tags", cJSON_CreateArray());
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
cJSON_AddItemToObject(event, "tags", cJSON_CreateArray());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
sqlite3_finalize(stmt);
|
|
||||||
return event;
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Migration Considerations
|
|
||||||
|
|
||||||
### Handling Existing Data
|
|
||||||
|
|
||||||
If there's existing data in the current schema:
|
|
||||||
|
|
||||||
1. **Export existing events and tags**
|
|
||||||
2. **Transform tag data to JSON format**
|
|
||||||
3. **Classify events by kind**
|
|
||||||
4. **Import into new schema**
|
|
||||||
|
|
||||||
### Backward Compatibility
|
|
||||||
|
|
||||||
- API remains the same - events still have the same JSON structure
|
|
||||||
- Internal storage changes but external interface is unchanged
|
|
||||||
- Tests should pass with minimal modifications
|
|
||||||
|
|
||||||
## Performance Optimizations
|
|
||||||
|
|
||||||
### Essential Indexes
|
|
||||||
|
|
||||||
```sql
|
|
||||||
-- Core performance indexes
|
|
||||||
CREATE INDEX idx_events_pubkey ON events(pubkey);
|
|
||||||
CREATE INDEX idx_events_kind ON events(kind);
|
|
||||||
CREATE INDEX idx_events_created_at ON events(created_at DESC);
|
|
||||||
CREATE INDEX idx_events_event_type ON events(event_type);
|
|
||||||
|
|
||||||
-- Composite indexes for common query patterns
|
|
||||||
CREATE INDEX idx_events_kind_created_at ON events(kind, created_at DESC);
|
|
||||||
CREATE INDEX idx_events_pubkey_created_at ON events(pubkey, created_at DESC);
|
|
||||||
|
|
||||||
-- JSON tag indexes for common tag patterns
|
|
||||||
CREATE INDEX idx_events_e_tags ON events(
|
|
||||||
json_extract(tags, '$[*][1]')
|
|
||||||
) WHERE json_extract(tags, '$[*][0]') = 'e';
|
|
||||||
|
|
||||||
CREATE INDEX idx_events_p_tags ON events(
|
|
||||||
json_extract(tags, '$[*][1]')
|
|
||||||
) WHERE json_extract(tags, '$[*][0]') = 'p';
|
|
||||||
```
|
|
||||||
|
|
||||||
## Next Steps
|
|
||||||
|
|
||||||
1. **Switch to code mode** to implement the schema changes
|
|
||||||
2. **Update db/schema.sql** with new table definition
|
|
||||||
3. **Modify src/main.c** with new functions
|
|
||||||
4. **Update db/init.sh** for single table validation
|
|
||||||
5. **Test with existing test suite**
|
|
||||||
|
|
||||||
This approach will provide:
|
|
||||||
- ✅ Simplified schema management
|
|
||||||
- ✅ Protocol compliance preparation
|
|
||||||
- ✅ JSON tag query capabilities
|
|
||||||
- ✅ Performance optimization opportunities
|
|
||||||
- ✅ Easy REQ subscription handling
|
|
||||||
|
|
||||||
Ready to proceed with implementation?
|
|
||||||
@@ -1,331 +0,0 @@
|
|||||||
# Subscription Query Complexity Analysis
|
|
||||||
|
|
||||||
## Overview
|
|
||||||
|
|
||||||
This document analyzes how Nostr REQ subscription filters would be implemented across different schema designs, focusing on query complexity, performance implications, and implementation burden.
|
|
||||||
|
|
||||||
## Nostr REQ Filter Specification Recap
|
|
||||||
|
|
||||||
Clients send REQ messages with filters containing:
|
|
||||||
- **`ids`**: List of specific event IDs
|
|
||||||
- **`authors`**: List of pubkeys
|
|
||||||
- **`kinds`**: List of event kinds
|
|
||||||
- **`#<letter>`**: Tag filters (e.g., `#e` for event refs, `#p` for pubkey mentions)
|
|
||||||
- **`since`/`until`**: Time range filters
|
|
||||||
- **`limit`**: Maximum events to return
|
|
||||||
|
|
||||||
### Key Filter Behaviors:
|
|
||||||
- **Multiple filters = OR logic**: Match any filter
|
|
||||||
- **Within filter = AND logic**: Match all specified conditions
|
|
||||||
- **Lists = IN logic**: Match any value in the list
|
|
||||||
- **Tag filters**: Must have at least one matching tag
|
|
||||||
|
|
||||||
## Schema Comparison for REQ Handling
|
|
||||||
|
|
||||||
### Current Simple Schema (Single Table)
|
|
||||||
```sql
|
|
||||||
CREATE TABLE event (
|
|
||||||
id TEXT PRIMARY KEY,
|
|
||||||
pubkey TEXT NOT NULL,
|
|
||||||
created_at INTEGER NOT NULL,
|
|
||||||
kind INTEGER NOT NULL,
|
|
||||||
content TEXT NOT NULL,
|
|
||||||
sig TEXT NOT NULL
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE TABLE tag (
|
|
||||||
id TEXT NOT NULL, -- event ID
|
|
||||||
name TEXT NOT NULL,
|
|
||||||
value TEXT NOT NULL,
|
|
||||||
parameters TEXT
|
|
||||||
);
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Sample REQ Query Implementation:
|
|
||||||
```sql
|
|
||||||
-- Filter: {"authors": ["pubkey1", "pubkey2"], "kinds": [1, 6], "#p": ["target_pubkey"]}
|
|
||||||
SELECT DISTINCT e.*
|
|
||||||
FROM event e
|
|
||||||
WHERE e.pubkey IN ('pubkey1', 'pubkey2')
|
|
||||||
AND e.kind IN (1, 6)
|
|
||||||
AND EXISTS (
|
|
||||||
SELECT 1 FROM tag t
|
|
||||||
WHERE t.id = e.id AND t.name = 'p' AND t.value = 'target_pubkey'
|
|
||||||
)
|
|
||||||
ORDER BY e.created_at DESC, e.id ASC
|
|
||||||
LIMIT ?;
|
|
||||||
```
|
|
||||||
|
|
||||||
### Multi-Table Schema Challenge
|
|
||||||
|
|
||||||
With separate tables (`events_regular`, `events_replaceable`, `events_ephemeral`, `events_addressable`), a REQ filter could potentially match events across ALL tables.
|
|
||||||
|
|
||||||
#### Problem Example:
|
|
||||||
Filter: `{"kinds": [1, 0, 20001, 30023]}`
|
|
||||||
- Kind 1 → `events_regular`
|
|
||||||
- Kind 0 → `events_replaceable`
|
|
||||||
- Kind 20001 → `events_ephemeral`
|
|
||||||
- Kind 30023 → `events_addressable`
|
|
||||||
|
|
||||||
This requires **4 separate queries + UNION**, significantly complicating the implementation.
|
|
||||||
|
|
||||||
## Multi-Table Query Complexity
|
|
||||||
|
|
||||||
### Scenario 1: Cross-Table Kind Filter
|
|
||||||
```sql
|
|
||||||
-- Filter: {"kinds": [1, 0, 30023]}
|
|
||||||
-- Requires querying 3 different tables
|
|
||||||
|
|
||||||
SELECT id, pubkey, created_at, kind, content, sig FROM events_regular
|
|
||||||
WHERE kind = 1
|
|
||||||
UNION ALL
|
|
||||||
SELECT id, pubkey, created_at, kind, content, sig FROM events_replaceable
|
|
||||||
WHERE kind = 0
|
|
||||||
UNION ALL
|
|
||||||
SELECT id, pubkey, created_at, kind, content, sig FROM events_addressable
|
|
||||||
WHERE kind = 30023
|
|
||||||
ORDER BY created_at DESC, id ASC
|
|
||||||
LIMIT ?;
|
|
||||||
```
|
|
||||||
|
|
||||||
### Scenario 2: Cross-Table Author Filter
|
|
||||||
```sql
|
|
||||||
-- Filter: {"authors": ["pubkey1"]}
|
|
||||||
-- Must check ALL tables for this author
|
|
||||||
|
|
||||||
SELECT id, pubkey, created_at, kind, content, sig FROM events_regular
|
|
||||||
WHERE pubkey = 'pubkey1'
|
|
||||||
UNION ALL
|
|
||||||
SELECT id, pubkey, created_at, kind, content, sig FROM events_replaceable
|
|
||||||
WHERE pubkey = 'pubkey1'
|
|
||||||
UNION ALL
|
|
||||||
SELECT id, pubkey, created_at, kind, content, sig FROM events_ephemeral
|
|
||||||
WHERE pubkey = 'pubkey1'
|
|
||||||
UNION ALL
|
|
||||||
SELECT id, pubkey, created_at, kind, content, sig FROM events_addressable
|
|
||||||
WHERE pubkey = 'pubkey1'
|
|
||||||
ORDER BY created_at DESC, id ASC
|
|
||||||
LIMIT ?;
|
|
||||||
```
|
|
||||||
|
|
||||||
### Scenario 3: Complex Multi-Condition Filter
|
|
||||||
```sql
|
|
||||||
-- Filter: {"authors": ["pubkey1"], "kinds": [1, 0], "#p": ["target"], "since": 1234567890}
|
|
||||||
-- Extremely complex with multiple UNIONs and tag JOINs
|
|
||||||
|
|
||||||
WITH regular_results AS (
|
|
||||||
SELECT DISTINCT r.*
|
|
||||||
FROM events_regular r
|
|
||||||
JOIN tags_regular tr ON r.id = tr.event_id
|
|
||||||
WHERE r.pubkey = 'pubkey1'
|
|
||||||
AND r.kind = 1
|
|
||||||
AND r.created_at >= 1234567890
|
|
||||||
AND tr.name = 'p' AND tr.value = 'target'
|
|
||||||
),
|
|
||||||
replaceable_results AS (
|
|
||||||
SELECT DISTINCT rp.*
|
|
||||||
FROM events_replaceable rp
|
|
||||||
JOIN tags_replaceable trp ON (rp.pubkey, rp.kind) = (trp.event_pubkey, trp.event_kind)
|
|
||||||
WHERE rp.pubkey = 'pubkey1'
|
|
||||||
AND rp.kind = 0
|
|
||||||
AND rp.created_at >= 1234567890
|
|
||||||
AND trp.name = 'p' AND trp.value = 'target'
|
|
||||||
)
|
|
||||||
SELECT * FROM regular_results
|
|
||||||
UNION ALL
|
|
||||||
SELECT * FROM replaceable_results
|
|
||||||
ORDER BY created_at DESC, id ASC
|
|
||||||
LIMIT ?;
|
|
||||||
```
|
|
||||||
|
|
||||||
## Implementation Burden Analysis
|
|
||||||
|
|
||||||
### Single Table Approach
|
|
||||||
```c
|
|
||||||
// Simple - one query builder function
|
|
||||||
char* build_filter_query(cJSON* filters) {
|
|
||||||
// Build single SELECT with WHERE conditions
|
|
||||||
// Single ORDER BY and LIMIT
|
|
||||||
// One execution path
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Multi-Table Approach
|
|
||||||
```c
|
|
||||||
// Complex - requires routing and union logic
|
|
||||||
char* build_multi_table_query(cJSON* filters) {
|
|
||||||
// 1. Analyze kinds to determine which tables to query
|
|
||||||
// 2. Split filters per table type
|
|
||||||
// 3. Build separate queries for each table
|
|
||||||
// 4. Union results with complex ORDER BY
|
|
||||||
// 5. Handle LIMIT across UNION (tricky!)
|
|
||||||
}
|
|
||||||
|
|
||||||
typedef struct {
|
|
||||||
bool needs_regular;
|
|
||||||
bool needs_replaceable;
|
|
||||||
bool needs_ephemeral;
|
|
||||||
bool needs_addressable;
|
|
||||||
cJSON* regular_filter;
|
|
||||||
cJSON* replaceable_filter;
|
|
||||||
cJSON* ephemeral_filter;
|
|
||||||
cJSON* addressable_filter;
|
|
||||||
} filter_routing_t;
|
|
||||||
```
|
|
||||||
|
|
||||||
### Query Routing Complexity
|
|
||||||
|
|
||||||
For each REQ filter, we must:
|
|
||||||
|
|
||||||
1. **Analyze kinds** → Determine which tables to query
|
|
||||||
2. **Split filters** → Create per-table filter conditions
|
|
||||||
3. **Handle tag filters** → Different tag table references per event type
|
|
||||||
4. **Union results** → Merge with proper ordering
|
|
||||||
5. **Apply LIMIT** → Complex with UNION queries
|
|
||||||
|
|
||||||
## Performance Implications
|
|
||||||
|
|
||||||
### Single Table Advantages:
|
|
||||||
- ✅ **Single query execution**
|
|
||||||
- ✅ **One index strategy**
|
|
||||||
- ✅ **Simple LIMIT handling**
|
|
||||||
- ✅ **Unified ORDER BY**
|
|
||||||
- ✅ **No UNION overhead**
|
|
||||||
|
|
||||||
### Multi-Table Disadvantages:
|
|
||||||
- ❌ **Multiple query executions**
|
|
||||||
- ❌ **UNION sorting overhead**
|
|
||||||
- ❌ **Complex LIMIT application**
|
|
||||||
- ❌ **Index fragmentation across tables**
|
|
||||||
- ❌ **Result set merging complexity**
|
|
||||||
|
|
||||||
## Specific REQ Filter Challenges
|
|
||||||
|
|
||||||
### 1. LIMIT Handling with UNION
|
|
||||||
```sql
|
|
||||||
-- WRONG: Limit applies to each subquery
|
|
||||||
(SELECT * FROM events_regular WHERE ... LIMIT 100)
|
|
||||||
UNION ALL
|
|
||||||
(SELECT * FROM events_replaceable WHERE ... LIMIT 100)
|
|
||||||
-- Could return 200 events!
|
|
||||||
|
|
||||||
-- CORRECT: Limit applies to final result
|
|
||||||
SELECT * FROM (
|
|
||||||
SELECT * FROM events_regular WHERE ...
|
|
||||||
UNION ALL
|
|
||||||
SELECT * FROM events_replaceable WHERE ...
|
|
||||||
ORDER BY created_at DESC, id ASC
|
|
||||||
) LIMIT 100;
|
|
||||||
-- But this sorts ALL results before limiting!
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. Tag Filter Complexity
|
|
||||||
Each event type needs different tag table joins:
|
|
||||||
- `events_regular` → `tags_regular`
|
|
||||||
- `events_replaceable` → `tags_replaceable` (with composite key)
|
|
||||||
- `events_addressable` → `tags_addressable` (with composite key)
|
|
||||||
- `events_ephemeral` → `tags_ephemeral`
|
|
||||||
|
|
||||||
### 3. Subscription State Management
|
|
||||||
With multiple tables, subscription state becomes complex:
|
|
||||||
- Which tables does this subscription monitor?
|
|
||||||
- How to efficiently check new events across tables?
|
|
||||||
- Different trigger/notification patterns per table
|
|
||||||
|
|
||||||
## Alternative: Unified Event View
|
|
||||||
|
|
||||||
### Hybrid Approach: Views Over Multi-Tables
|
|
||||||
```sql
|
|
||||||
-- Create unified view for queries
|
|
||||||
CREATE VIEW all_events AS
|
|
||||||
SELECT
|
|
||||||
'regular' as event_type,
|
|
||||||
id, pubkey, created_at, kind, content, sig
|
|
||||||
FROM events_regular
|
|
||||||
UNION ALL
|
|
||||||
SELECT
|
|
||||||
'replaceable' as event_type,
|
|
||||||
id, pubkey, created_at, kind, content, sig
|
|
||||||
FROM events_replaceable
|
|
||||||
UNION ALL
|
|
||||||
SELECT
|
|
||||||
'ephemeral' as event_type,
|
|
||||||
id, pubkey, created_at, kind, content, sig
|
|
||||||
FROM events_ephemeral
|
|
||||||
UNION ALL
|
|
||||||
SELECT
|
|
||||||
'addressable' as event_type,
|
|
||||||
id, pubkey, created_at, kind, content, sig
|
|
||||||
FROM events_addressable;
|
|
||||||
|
|
||||||
-- Unified tag view
|
|
||||||
CREATE VIEW all_tags AS
|
|
||||||
SELECT event_id, name, value, parameters FROM tags_regular
|
|
||||||
UNION ALL
|
|
||||||
SELECT CONCAT(event_pubkey, ':', event_kind), name, value, parameters FROM tags_replaceable
|
|
||||||
UNION ALL
|
|
||||||
SELECT event_id, name, value, parameters FROM tags_ephemeral
|
|
||||||
UNION ALL
|
|
||||||
SELECT CONCAT(event_pubkey, ':', event_kind, ':', d_tag), name, value, parameters FROM tags_addressable;
|
|
||||||
```
|
|
||||||
|
|
||||||
### REQ Query Against Views:
|
|
||||||
```sql
|
|
||||||
-- Much simpler - back to single-table complexity
|
|
||||||
SELECT DISTINCT e.*
|
|
||||||
FROM all_events e
|
|
||||||
JOIN all_tags t ON e.id = t.event_id
|
|
||||||
WHERE e.pubkey IN (?)
|
|
||||||
AND e.kind IN (?)
|
|
||||||
AND t.name = 'p' AND t.value = ?
|
|
||||||
ORDER BY e.created_at DESC, e.id ASC
|
|
||||||
LIMIT ?;
|
|
||||||
```
|
|
||||||
|
|
||||||
## Recommendation
|
|
||||||
|
|
||||||
**The multi-table approach creates significant subscription query complexity that may outweigh its benefits.**
|
|
||||||
|
|
||||||
### Key Issues:
|
|
||||||
1. **REQ filters don't map to event types** - clients filter by kind, author, tags, not storage semantics
|
|
||||||
2. **UNION query complexity** - much harder to optimize and implement
|
|
||||||
3. **Subscription management burden** - must monitor multiple tables
|
|
||||||
4. **Performance uncertainty** - UNION queries may be slower than single table
|
|
||||||
|
|
||||||
### Alternative Recommendation:
|
|
||||||
|
|
||||||
**Modified Single Table with Event Type Column:**
|
|
||||||
|
|
||||||
```sql
|
|
||||||
CREATE TABLE events (
|
|
||||||
id TEXT PRIMARY KEY,
|
|
||||||
pubkey TEXT NOT NULL,
|
|
||||||
created_at INTEGER NOT NULL,
|
|
||||||
kind INTEGER NOT NULL,
|
|
||||||
event_type TEXT NOT NULL, -- 'regular', 'replaceable', 'ephemeral', 'addressable'
|
|
||||||
content TEXT NOT NULL,
|
|
||||||
sig TEXT NOT NULL,
|
|
||||||
tags JSON,
|
|
||||||
|
|
||||||
-- Replaceable event fields
|
|
||||||
replaced_at INTEGER,
|
|
||||||
|
|
||||||
-- Addressable event fields
|
|
||||||
d_tag TEXT,
|
|
||||||
|
|
||||||
-- Unique constraints per event type
|
|
||||||
CONSTRAINT unique_replaceable
|
|
||||||
UNIQUE (pubkey, kind) WHERE event_type = 'replaceable',
|
|
||||||
CONSTRAINT unique_addressable
|
|
||||||
UNIQUE (pubkey, kind, d_tag) WHERE event_type = 'addressable'
|
|
||||||
);
|
|
||||||
```
|
|
||||||
|
|
||||||
### Benefits:
|
|
||||||
- ✅ **Simple REQ queries** - single table, familiar patterns
|
|
||||||
- ✅ **Type enforcement** - partial unique constraints handle replacement logic
|
|
||||||
- ✅ **Performance** - unified indexes, no UNIONs
|
|
||||||
- ✅ **Implementation simplicity** - minimal changes from current code
|
|
||||||
- ✅ **Future flexibility** - can split tables later if needed
|
|
||||||
|
|
||||||
This approach gets the best of both worlds: protocol compliance through constraints, but query simplicity through unified storage.
|
|
||||||
289
relay.log
289
relay.log
@@ -5,236 +5,127 @@
|
|||||||
[32m[SUCCESS][0m WebSocket relay started on ws://127.0.0.1:8888
|
[32m[SUCCESS][0m WebSocket relay started on ws://127.0.0.1:8888
|
||||||
[34m[INFO][0m WebSocket connection established
|
[34m[INFO][0m WebSocket connection established
|
||||||
[34m[INFO][0m Received WebSocket message
|
[34m[INFO][0m Received WebSocket message
|
||||||
[34m[INFO][0m Handling REQ message
|
[34m[INFO][0m Received WebSocket message
|
||||||
[34m[INFO][0m Executing SQL: SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 AND kind IN (1) ORDER BY created_at DESC LIMIT 500
|
[34m[INFO][0m Received WebSocket message
|
||||||
[34m[INFO][0m Query returned 5 rows
|
[34m[INFO][0m Received WebSocket message
|
||||||
[34m[INFO][0m Total events sent: 5
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
[34m[INFO][0m WebSocket connection closed
|
[34m[INFO][0m WebSocket connection closed
|
||||||
[34m[INFO][0m WebSocket connection established
|
[34m[INFO][0m WebSocket connection established
|
||||||
[34m[INFO][0m Received WebSocket message
|
[34m[INFO][0m Received WebSocket message
|
||||||
[34m[INFO][0m Handling EVENT message
|
[34m[INFO][0m Handling EVENT message with full NIP-01 validation
|
||||||
[32m[SUCCESS][0m Event stored in database
|
[32m[SUCCESS][0m Event stored in database
|
||||||
[32m[SUCCESS][0m Event stored successfully
|
[32m[SUCCESS][0m Event validated and stored successfully
|
||||||
[34m[INFO][0m WebSocket connection closed
|
[34m[INFO][0m WebSocket connection closed
|
||||||
[34m[INFO][0m WebSocket connection established
|
[34m[INFO][0m WebSocket connection established
|
||||||
[34m[INFO][0m Received WebSocket message
|
[34m[INFO][0m Received WebSocket message
|
||||||
[34m[INFO][0m Handling EVENT message
|
[34m[INFO][0m Handling EVENT message with full NIP-01 validation
|
||||||
[32m[SUCCESS][0m Event stored in database
|
[32m[SUCCESS][0m Event stored in database
|
||||||
[32m[SUCCESS][0m Event stored successfully
|
[32m[SUCCESS][0m Event validated and stored successfully
|
||||||
[34m[INFO][0m WebSocket connection closed
|
[34m[INFO][0m WebSocket connection closed
|
||||||
[34m[INFO][0m WebSocket connection established
|
[34m[INFO][0m WebSocket connection established
|
||||||
[34m[INFO][0m Received WebSocket message
|
[34m[INFO][0m Received WebSocket message
|
||||||
[34m[INFO][0m Handling EVENT message
|
[34m[INFO][0m Handling EVENT message with full NIP-01 validation
|
||||||
[32m[SUCCESS][0m Event stored in database
|
[32m[SUCCESS][0m Event stored in database
|
||||||
[32m[SUCCESS][0m Event stored successfully
|
[32m[SUCCESS][0m Event validated and stored successfully
|
||||||
[34m[INFO][0m WebSocket connection closed
|
[34m[INFO][0m WebSocket connection closed
|
||||||
[34m[INFO][0m WebSocket connection established
|
[34m[INFO][0m WebSocket connection established
|
||||||
[34m[INFO][0m Received WebSocket message
|
[34m[INFO][0m Received WebSocket message
|
||||||
[34m[INFO][0m Handling EVENT message
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[33m[WARNING][0m Subscription 'exists_1757082297' not found for removal
|
||||||
|
[34m[INFO][0m Closed subscription: exists_1757082297
|
||||||
|
[34m[INFO][0m WebSocket connection closed
|
||||||
|
[34m[INFO][0m WebSocket connection established
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[33m[WARNING][0m Subscription 'exists_1757082298' not found for removal
|
||||||
|
[34m[INFO][0m Closed subscription: exists_1757082298
|
||||||
|
[34m[INFO][0m WebSocket connection closed
|
||||||
|
[34m[INFO][0m WebSocket connection established
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Handling EVENT message with full NIP-01 validation
|
||||||
|
[34m[INFO][0m Event not found for deletion: [34m[INFO][0m ...
|
||||||
|
[34m[INFO][0m Event not found for deletion: [34m[INFO][0m ...
|
||||||
[32m[SUCCESS][0m Event stored in database
|
[32m[SUCCESS][0m Event stored in database
|
||||||
[32m[SUCCESS][0m Event stored successfully
|
[34m[INFO][0m Deletion request processed: 0 events deleted
|
||||||
[34m[INFO][0m WebSocket connection closed
|
[34m[INFO][0m WebSocket connection closed
|
||||||
[34m[INFO][0m WebSocket connection established
|
[34m[INFO][0m WebSocket connection established
|
||||||
[34m[INFO][0m Received WebSocket message
|
[34m[INFO][0m Received WebSocket message
|
||||||
[34m[INFO][0m Handling EVENT message
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[33m[WARNING][0m Subscription 'exists_1757082301' not found for removal
|
||||||
|
[34m[INFO][0m Closed subscription: exists_1757082301
|
||||||
|
[34m[INFO][0m WebSocket connection closed
|
||||||
|
[34m[INFO][0m WebSocket connection established
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[33m[WARNING][0m Subscription 'exists_1757082301' not found for removal
|
||||||
|
[34m[INFO][0m Closed subscription: exists_1757082301
|
||||||
|
[34m[INFO][0m WebSocket connection closed
|
||||||
|
[34m[INFO][0m WebSocket connection established
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[33m[WARNING][0m Subscription 'exists_1757082301' not found for removal
|
||||||
|
[34m[INFO][0m Closed subscription: exists_1757082301
|
||||||
|
[34m[INFO][0m WebSocket connection closed
|
||||||
|
[34m[INFO][0m WebSocket connection established
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Handling EVENT message with full NIP-01 validation
|
||||||
[32m[SUCCESS][0m Event stored in database
|
[32m[SUCCESS][0m Event stored in database
|
||||||
[32m[SUCCESS][0m Event stored successfully
|
[34m[INFO][0m Deletion request processed: 0 events deleted
|
||||||
[34m[INFO][0m WebSocket connection closed
|
[34m[INFO][0m WebSocket connection closed
|
||||||
[34m[INFO][0m WebSocket connection established
|
[34m[INFO][0m WebSocket connection established
|
||||||
[34m[INFO][0m Received WebSocket message
|
[34m[INFO][0m Received WebSocket message
|
||||||
[34m[INFO][0m Handling EVENT message
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[33m[WARNING][0m Subscription 'exists_1757082305' not found for removal
|
||||||
|
[34m[INFO][0m Closed subscription: exists_1757082305
|
||||||
|
[34m[INFO][0m WebSocket connection closed
|
||||||
|
[34m[INFO][0m WebSocket connection established
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Handling EVENT message with full NIP-01 validation
|
||||||
|
[34m[INFO][0m Event not found for deletion: [31m✗[0m Cou...
|
||||||
[32m[SUCCESS][0m Event stored in database
|
[32m[SUCCESS][0m Event stored in database
|
||||||
[32m[SUCCESS][0m Event stored successfully
|
[34m[INFO][0m Deletion request processed: 0 events deleted
|
||||||
[34m[INFO][0m WebSocket connection closed
|
[34m[INFO][0m WebSocket connection closed
|
||||||
[34m[INFO][0m WebSocket connection established
|
[34m[INFO][0m WebSocket connection established
|
||||||
[34m[INFO][0m Received WebSocket message
|
[34m[INFO][0m Received WebSocket message
|
||||||
[34m[INFO][0m Handling EVENT message
|
[34m[INFO][0m Handling REQ message for persistent subscription
|
||||||
[32m[SUCCESS][0m Event stored in database
|
[34m[INFO][0m Added subscription 'exists_1757082309' (total: 1)
|
||||||
[32m[SUCCESS][0m Event stored successfully
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling REQ message
|
|
||||||
[34m[INFO][0m Executing SQL: SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 ORDER BY created_at DESC LIMIT 500
|
[34m[INFO][0m Executing SQL: SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 ORDER BY created_at DESC LIMIT 500
|
||||||
[34m[INFO][0m Query returned 17 rows
|
[34m[INFO][0m Query returned 25 rows
|
||||||
[34m[INFO][0m Total events sent: 17
|
[34m[INFO][0m Total events sent: 25
|
||||||
[34m[INFO][0m Received WebSocket message
|
[34m[INFO][0m Received WebSocket message
|
||||||
[34m[INFO][0m Subscription closed
|
[34m[INFO][0m Removed subscription 'exists_1757082309' (total: 0)
|
||||||
|
[34m[INFO][0m Closed subscription: exists_1757082309
|
||||||
|
[34m[INFO][0m WebSocket connection closed
|
||||||
|
[33m[WARNING][0m Subscription 'z[<5B><>.Y' not found for removal
|
||||||
|
[34m[INFO][0m WebSocket connection established
|
||||||
|
[34m[INFO][0m Received WebSocket message
|
||||||
|
[34m[INFO][0m Handling EVENT message with full NIP-01 validation
|
||||||
[34m[INFO][0m WebSocket connection closed
|
[34m[INFO][0m WebSocket connection closed
|
||||||
[34m[INFO][0m WebSocket connection established
|
[34m[INFO][0m WebSocket connection established
|
||||||
[34m[INFO][0m Received WebSocket message
|
[34m[INFO][0m Received WebSocket message
|
||||||
[34m[INFO][0m Handling REQ message
|
[34m[INFO][0m Handling REQ message for persistent subscription
|
||||||
[34m[INFO][0m Executing SQL: SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 AND kind IN (1) ORDER BY created_at DESC LIMIT 500
|
[34m[INFO][0m Added subscription 'kind5_1757082309' (total: 1)
|
||||||
[34m[INFO][0m Query returned 7 rows
|
[34m[INFO][0m Executing SQL: SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 AND kind IN (5) ORDER BY created_at DESC LIMIT 500
|
||||||
[34m[INFO][0m Total events sent: 7
|
[34m[INFO][0m Query returned 3 rows
|
||||||
|
[34m[INFO][0m Total events sent: 3
|
||||||
[34m[INFO][0m Received WebSocket message
|
[34m[INFO][0m Received WebSocket message
|
||||||
[34m[INFO][0m Subscription closed
|
[34m[INFO][0m Removed subscription 'kind5_1757082309' (total: 0)
|
||||||
[34m[INFO][0m WebSocket connection closed
|
[34m[INFO][0m Closed subscription: kind5_1757082309
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling REQ message
|
|
||||||
[34m[INFO][0m Executing SQL: SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 AND kind IN (0) ORDER BY created_at DESC LIMIT 500
|
|
||||||
[34m[INFO][0m Query returned 1 rows
|
|
||||||
[34m[INFO][0m Total events sent: 1
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Subscription closed
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling REQ message
|
|
||||||
[34m[INFO][0m Executing SQL: SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 AND pubkey IN ('aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4') ORDER BY created_at DESC LIMIT 500
|
|
||||||
[34m[INFO][0m Query returned 17 rows
|
|
||||||
[34m[INFO][0m Total events sent: 17
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Subscription closed
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling REQ message
|
|
||||||
[34m[INFO][0m Executing SQL: SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 AND created_at >= 1756983802 ORDER BY created_at DESC LIMIT 500
|
|
||||||
[34m[INFO][0m Query returned 6 rows
|
|
||||||
[34m[INFO][0m Total events sent: 6
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Subscription closed
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling REQ message
|
|
||||||
[34m[INFO][0m Executing SQL: SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 ORDER BY created_at DESC LIMIT 500
|
|
||||||
[34m[INFO][0m Query returned 17 rows
|
|
||||||
[34m[INFO][0m Total events sent: 17
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Subscription closed
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling REQ message
|
|
||||||
[34m[INFO][0m Executing SQL: SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 AND kind IN (0,1) ORDER BY created_at DESC LIMIT 500
|
|
||||||
[34m[INFO][0m Query returned 8 rows
|
|
||||||
[34m[INFO][0m Total events sent: 8
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Subscription closed
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling REQ message
|
|
||||||
[34m[INFO][0m Executing SQL: SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 AND kind IN (1) ORDER BY created_at DESC LIMIT 1
|
|
||||||
[34m[INFO][0m Query returned 1 rows
|
|
||||||
[34m[INFO][0m Total events sent: 1
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Subscription closed
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling EVENT message
|
|
||||||
[32m[SUCCESS][0m Event stored in database
|
|
||||||
[32m[SUCCESS][0m Event stored successfully
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling EVENT message
|
|
||||||
[32m[SUCCESS][0m Event stored in database
|
|
||||||
[32m[SUCCESS][0m Event stored successfully
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling EVENT message
|
|
||||||
[32m[SUCCESS][0m Event stored in database
|
|
||||||
[32m[SUCCESS][0m Event stored successfully
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling EVENT message
|
|
||||||
[32m[SUCCESS][0m Event stored in database
|
|
||||||
[32m[SUCCESS][0m Event stored successfully
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling EVENT message
|
|
||||||
[32m[SUCCESS][0m Event stored in database
|
|
||||||
[32m[SUCCESS][0m Event stored successfully
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling EVENT message
|
|
||||||
[32m[SUCCESS][0m Event stored in database
|
|
||||||
[32m[SUCCESS][0m Event stored successfully
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling EVENT message
|
|
||||||
[32m[SUCCESS][0m Event stored in database
|
|
||||||
[32m[SUCCESS][0m Event stored successfully
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling REQ message
|
|
||||||
[34m[INFO][0m Executing SQL: SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 ORDER BY created_at DESC LIMIT 500
|
|
||||||
[34m[INFO][0m Query returned 22 rows
|
|
||||||
[34m[INFO][0m Total events sent: 22
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Subscription closed
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling REQ message
|
|
||||||
[34m[INFO][0m Executing SQL: SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 AND kind IN (1) ORDER BY created_at DESC LIMIT 500
|
|
||||||
[34m[INFO][0m Query returned 9 rows
|
|
||||||
[34m[INFO][0m Total events sent: 9
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Subscription closed
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling REQ message
|
|
||||||
[34m[INFO][0m Executing SQL: SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 AND kind IN (0) ORDER BY created_at DESC LIMIT 500
|
|
||||||
[34m[INFO][0m Query returned 1 rows
|
|
||||||
[34m[INFO][0m Total events sent: 1
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Subscription closed
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling REQ message
|
|
||||||
[34m[INFO][0m Executing SQL: SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 AND pubkey IN ('aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4') ORDER BY created_at DESC LIMIT 500
|
|
||||||
[34m[INFO][0m Query returned 22 rows
|
|
||||||
[34m[INFO][0m Total events sent: 22
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Subscription closed
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling REQ message
|
|
||||||
[34m[INFO][0m Executing SQL: SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 AND created_at >= 1756983945 ORDER BY created_at DESC LIMIT 500
|
|
||||||
[34m[INFO][0m Query returned 9 rows
|
|
||||||
[34m[INFO][0m Total events sent: 9
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Subscription closed
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling REQ message
|
|
||||||
[34m[INFO][0m Executing SQL: SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 ORDER BY created_at DESC LIMIT 500
|
|
||||||
[34m[INFO][0m Query returned 22 rows
|
|
||||||
[34m[INFO][0m Total events sent: 22
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Subscription closed
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling REQ message
|
|
||||||
[34m[INFO][0m Executing SQL: SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 AND kind IN (0,1) ORDER BY created_at DESC LIMIT 500
|
|
||||||
[34m[INFO][0m Query returned 10 rows
|
|
||||||
[34m[INFO][0m Total events sent: 10
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Subscription closed
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
|
||||||
[34m[INFO][0m WebSocket connection established
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Handling REQ message
|
|
||||||
[34m[INFO][0m Executing SQL: SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 AND kind IN (1) ORDER BY created_at DESC LIMIT 1
|
|
||||||
[34m[INFO][0m Query returned 1 rows
|
|
||||||
[34m[INFO][0m Total events sent: 1
|
|
||||||
[34m[INFO][0m Received WebSocket message
|
|
||||||
[34m[INFO][0m Subscription closed
|
|
||||||
[34m[INFO][0m WebSocket connection closed
|
[34m[INFO][0m WebSocket connection closed
|
||||||
|
[33m[WARNING][0m Subscription '<27>f<EFBFBD><66>.Y' not found for removal
|
||||||
|
|||||||
1528
src/main.c
1528
src/main.c
File diff suppressed because it is too large
Load Diff
@@ -99,6 +99,47 @@ publish_event() {
|
|||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Helper function to publish invalid event and expect rejection
|
||||||
|
publish_invalid_event() {
|
||||||
|
local event_json="$1"
|
||||||
|
local description="$2"
|
||||||
|
local expected_error="$3"
|
||||||
|
|
||||||
|
print_info "Publishing invalid $description..."
|
||||||
|
|
||||||
|
# Create EVENT message in Nostr format
|
||||||
|
local event_message="[\"EVENT\",$event_json]"
|
||||||
|
|
||||||
|
# Publish to relay
|
||||||
|
local response=""
|
||||||
|
if command -v websocat &> /dev/null; then
|
||||||
|
response=$(echo "$event_message" | timeout 5s websocat "$RELAY_URL" 2>&1 || echo "Connection failed")
|
||||||
|
else
|
||||||
|
print_error "websocat not found - required for testing"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check response - should contain "false" and error message
|
||||||
|
if [[ "$response" == *"Connection failed"* ]]; then
|
||||||
|
print_error "Failed to connect to relay for $description"
|
||||||
|
return 1
|
||||||
|
elif [[ "$response" == *"false"* ]]; then
|
||||||
|
# Extract error message
|
||||||
|
local error_msg=$(echo "$response" | grep -o '"[^"]*invalid[^"]*"' | head -1 | sed 's/"//g' 2>/dev/null || echo "rejected")
|
||||||
|
print_success "$description correctly rejected: $error_msg"
|
||||||
|
echo # Add blank line for readability
|
||||||
|
return 0
|
||||||
|
elif [[ "$response" == *"true"* ]]; then
|
||||||
|
print_error "$description was incorrectly accepted (should have been rejected)"
|
||||||
|
echo # Add blank line for readability
|
||||||
|
return 1
|
||||||
|
else
|
||||||
|
print_warning "$description response unclear: $response"
|
||||||
|
echo # Add blank line for readability
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
# Test subscription with filters
|
# Test subscription with filters
|
||||||
test_subscription() {
|
test_subscription() {
|
||||||
local sub_id="$1"
|
local sub_id="$1"
|
||||||
@@ -211,7 +252,41 @@ run_comprehensive_test() {
|
|||||||
# Brief pause to let events settle
|
# Brief pause to let events settle
|
||||||
sleep 2
|
sleep 2
|
||||||
|
|
||||||
print_header "PHASE 2: Testing Subscriptions and Filters"
|
print_header "PHASE 2: Testing Invalid Events (NIP-01 Validation)"
|
||||||
|
|
||||||
|
print_step "Testing various invalid events that should be rejected..."
|
||||||
|
|
||||||
|
# Test 1: Event with invalid JSON structure (malformed)
|
||||||
|
local malformed_event='{"id":"invalid","pubkey":"invalid_pubkey","created_at":"not_a_number","kind":1,"tags":[],"content":"test"}'
|
||||||
|
publish_invalid_event "$malformed_event" "malformed event with invalid created_at" "invalid"
|
||||||
|
|
||||||
|
# Test 2: Event with missing required fields
|
||||||
|
local missing_field_event='{"id":"test123","pubkey":"valid_pubkey","kind":1,"tags":[],"content":"test"}'
|
||||||
|
publish_invalid_event "$missing_field_event" "event missing created_at and sig" "invalid"
|
||||||
|
|
||||||
|
# Test 3: Event with invalid pubkey format (not hex)
|
||||||
|
local invalid_pubkey_event='{"id":"abc123","pubkey":"not_valid_hex_pubkey","created_at":1234567890,"kind":1,"tags":[],"content":"test","sig":"fake_sig"}'
|
||||||
|
publish_invalid_event "$invalid_pubkey_event" "event with invalid pubkey format" "invalid"
|
||||||
|
|
||||||
|
# Test 4: Event with invalid event ID format
|
||||||
|
local invalid_id_event='{"id":"not_64_char_hex","pubkey":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef","created_at":1234567890,"kind":1,"tags":[],"content":"test","sig":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"}'
|
||||||
|
publish_invalid_event "$invalid_id_event" "event with invalid ID format" "invalid"
|
||||||
|
|
||||||
|
# Test 5: Event with invalid signature
|
||||||
|
local invalid_sig_event='{"id":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef","pubkey":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef","created_at":1234567890,"kind":1,"tags":[],"content":"test","sig":"invalid_signature_format"}'
|
||||||
|
publish_invalid_event "$invalid_sig_event" "event with invalid signature format" "invalid"
|
||||||
|
|
||||||
|
# Test 6: Event with invalid kind (negative)
|
||||||
|
local invalid_kind_event='{"id":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef","pubkey":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef","created_at":1234567890,"kind":-1,"tags":[],"content":"test","sig":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"}'
|
||||||
|
publish_invalid_event "$invalid_kind_event" "event with negative kind" "invalid"
|
||||||
|
|
||||||
|
# Test 7: Event with invalid tags format (not array)
|
||||||
|
local invalid_tags_event='{"id":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef","pubkey":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef","created_at":1234567890,"kind":1,"tags":"not_an_array","content":"test","sig":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"}'
|
||||||
|
publish_invalid_event "$invalid_tags_event" "event with invalid tags format" "invalid"
|
||||||
|
|
||||||
|
print_success "Invalid event tests completed - all should have been rejected"
|
||||||
|
|
||||||
|
print_header "PHASE 3: Testing Subscriptions and Filters"
|
||||||
|
|
||||||
# Test subscription filters
|
# Test subscription filters
|
||||||
print_step "Testing various subscription filters..."
|
print_step "Testing various subscription filters..."
|
||||||
@@ -240,7 +315,7 @@ run_comprehensive_test() {
|
|||||||
# Test 7: Limit results
|
# Test 7: Limit results
|
||||||
test_subscription "test_limit" '{"kinds":[1],"limit":1}' "Limited to 1 event" "1"
|
test_subscription "test_limit" '{"kinds":[1],"limit":1}' "Limited to 1 event" "1"
|
||||||
|
|
||||||
print_header "PHASE 3: Database Verification"
|
print_header "PHASE 4: Database Verification"
|
||||||
|
|
||||||
# Check what's actually stored in the database
|
# Check what's actually stored in the database
|
||||||
print_step "Verifying database contents..."
|
print_step "Verifying database contents..."
|
||||||
@@ -265,13 +340,14 @@ run_comprehensive_test() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
# Run the comprehensive test
|
# Run the comprehensive test
|
||||||
print_header "Starting C-Relay Comprehensive Test Suite"
|
print_header "Starting C-Relay Comprehensive Test Suite with NIP-01 Validation"
|
||||||
echo
|
echo
|
||||||
|
|
||||||
if run_comprehensive_test; then
|
if run_comprehensive_test; then
|
||||||
echo
|
echo
|
||||||
print_success "All tests completed successfully!"
|
print_success "All tests completed successfully!"
|
||||||
print_info "The C-Relay hybrid schema implementation is working correctly"
|
print_info "The C-Relay with full NIP-01 validation is working correctly"
|
||||||
|
print_info "✅ Event validation, signature verification, and error handling all working"
|
||||||
echo
|
echo
|
||||||
exit 0
|
exit 0
|
||||||
else
|
else
|
||||||
|
|||||||
386
tests/9_delete_test.sh
Executable file
386
tests/9_delete_test.sh
Executable file
@@ -0,0 +1,386 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# NIP-09 Event Deletion Request Test for C-Relay
|
||||||
|
# Tests deletion request functionality - assumes relay is already running
|
||||||
|
# Based on the pattern from 1_nip_test.sh
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
# Color constants
|
||||||
|
RED='\033[31m'
|
||||||
|
GREEN='\033[32m'
|
||||||
|
YELLOW='\033[33m'
|
||||||
|
BLUE='\033[34m'
|
||||||
|
BOLD='\033[1m'
|
||||||
|
RESET='\033[0m'
|
||||||
|
|
||||||
|
# Test configuration
|
||||||
|
RELAY_URL="ws://127.0.0.1:8888"
|
||||||
|
TEST_PRIVATE_KEY="nsec1j4c6269y9w0q2er2xjw8sv2ehyrtfxq3jwgdlxj6qfn8z4gjsq5qfvfk99"
|
||||||
|
|
||||||
|
# Print functions
|
||||||
|
print_header() {
|
||||||
|
echo -e "${BLUE}${BOLD}=== $1 ===${RESET}"
|
||||||
|
}
|
||||||
|
|
||||||
|
print_step() {
|
||||||
|
echo -e "${YELLOW}[STEP]${RESET} $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
print_success() {
|
||||||
|
echo -e "${GREEN}✓${RESET} $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
print_error() {
|
||||||
|
echo -e "${RED}✗${RESET} $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
print_info() {
|
||||||
|
echo -e "${BLUE}[INFO]${RESET} $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
print_warning() {
|
||||||
|
echo -e "${YELLOW}[WARNING]${RESET} $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Helper function to publish event and extract ID
|
||||||
|
publish_event() {
|
||||||
|
local event_json="$1"
|
||||||
|
local description="$2"
|
||||||
|
|
||||||
|
# Extract event ID
|
||||||
|
local event_id=$(echo "$event_json" | jq -r '.id' 2>/dev/null)
|
||||||
|
if [[ "$event_id" == "null" || -z "$event_id" ]]; then
|
||||||
|
print_error "Could not extract event ID from $description"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
print_info "Publishing $description..."
|
||||||
|
|
||||||
|
# Create EVENT message in Nostr format
|
||||||
|
local event_message="[\"EVENT\",$event_json]"
|
||||||
|
|
||||||
|
# Publish to relay
|
||||||
|
local response=""
|
||||||
|
if command -v websocat &> /dev/null; then
|
||||||
|
response=$(echo "$event_message" | timeout 5s websocat "$RELAY_URL" 2>&1 || echo "Connection failed")
|
||||||
|
else
|
||||||
|
print_error "websocat not found - required for testing"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check response
|
||||||
|
if [[ "$response" == *"Connection failed"* ]]; then
|
||||||
|
print_error "Failed to connect to relay for $description"
|
||||||
|
return 1
|
||||||
|
elif [[ "$response" == *"true"* ]]; then
|
||||||
|
print_success "$description uploaded (ID: ${event_id:0:16}...)"
|
||||||
|
echo "$event_id"
|
||||||
|
return 0
|
||||||
|
else
|
||||||
|
print_warning "$description might have failed: $response"
|
||||||
|
echo ""
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Helper function to publish deletion request
|
||||||
|
publish_deletion_request() {
|
||||||
|
local deletion_event_json="$1"
|
||||||
|
local description="$2"
|
||||||
|
|
||||||
|
# Extract event ID
|
||||||
|
local event_id=$(echo "$deletion_event_json" | jq -r '.id' 2>/dev/null)
|
||||||
|
if [[ "$event_id" == "null" || -z "$event_id" ]]; then
|
||||||
|
print_error "Could not extract event ID from $description"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
print_info "Publishing $description..."
|
||||||
|
|
||||||
|
# Create EVENT message in Nostr format
|
||||||
|
local event_message="[\"EVENT\",$deletion_event_json]"
|
||||||
|
|
||||||
|
# Publish to relay
|
||||||
|
local response=""
|
||||||
|
if command -v websocat &> /dev/null; then
|
||||||
|
response=$(echo "$event_message" | timeout 5s websocat "$RELAY_URL" 2>&1 || echo "Connection failed")
|
||||||
|
else
|
||||||
|
print_error "websocat not found - required for testing"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check response
|
||||||
|
if [[ "$response" == *"Connection failed"* ]]; then
|
||||||
|
print_error "Failed to connect to relay for $description"
|
||||||
|
return 1
|
||||||
|
elif [[ "$response" == *"true"* ]]; then
|
||||||
|
print_success "$description accepted (ID: ${event_id:0:16}...)"
|
||||||
|
echo "$event_id"
|
||||||
|
return 0
|
||||||
|
else
|
||||||
|
print_warning "$description might have failed: $response"
|
||||||
|
echo ""
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Helper function to check if event exists via subscription
|
||||||
|
check_event_exists() {
|
||||||
|
local event_id="$1"
|
||||||
|
local sub_id="exists_$(date +%s%N | cut -c1-10)"
|
||||||
|
|
||||||
|
# Create REQ message to query for specific event ID
|
||||||
|
local req_message="[\"REQ\",\"$sub_id\",{\"ids\":[\"$event_id\"]}]"
|
||||||
|
|
||||||
|
# Send subscription and collect events
|
||||||
|
local response=""
|
||||||
|
if command -v websocat &> /dev/null; then
|
||||||
|
response=$(echo -e "$req_message\n[\"CLOSE\",\"$sub_id\"]" | timeout 3s websocat "$RELAY_URL" 2>/dev/null || echo "")
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Count EVENT responses
|
||||||
|
local event_count=0
|
||||||
|
if [[ -n "$response" ]]; then
|
||||||
|
event_count=$(echo "$response" | grep -c "\"EVENT\"" 2>/dev/null || echo "0")
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "$event_count"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Helper function to query events by kind
|
||||||
|
query_events_by_kind() {
|
||||||
|
local kind="$1"
|
||||||
|
local sub_id="kind${kind}_$(date +%s%N | cut -c1-10)"
|
||||||
|
|
||||||
|
# Create REQ message to query for events of specific kind
|
||||||
|
local req_message="[\"REQ\",\"$sub_id\",{\"kinds\":[$kind]}]"
|
||||||
|
|
||||||
|
# Send subscription and collect events
|
||||||
|
local response=""
|
||||||
|
if command -v websocat &> /dev/null; then
|
||||||
|
response=$(echo -e "$req_message\n[\"CLOSE\",\"$sub_id\"]" | timeout 3s websocat "$RELAY_URL" 2>/dev/null || echo "")
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Count EVENT responses
|
||||||
|
local event_count=0
|
||||||
|
if [[ -n "$response" ]]; then
|
||||||
|
event_count=$(echo "$response" | grep -c "\"EVENT\"" 2>/dev/null || echo "0")
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "$event_count"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Main test function
|
||||||
|
run_deletion_test() {
|
||||||
|
print_header "NIP-09 Event Deletion Request Test"
|
||||||
|
|
||||||
|
# Check dependencies
|
||||||
|
print_step "Checking dependencies..."
|
||||||
|
if ! command -v nak &> /dev/null; then
|
||||||
|
print_error "nak command not found"
|
||||||
|
print_info "Please install nak: go install github.com/fiatjaf/nak@latest"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
if ! command -v websocat &> /dev/null; then
|
||||||
|
print_error "websocat command not found"
|
||||||
|
print_info "Please install websocat for testing"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
if ! command -v jq &> /dev/null; then
|
||||||
|
print_error "jq command not found"
|
||||||
|
print_info "Please install jq for JSON processing"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
print_success "All dependencies found"
|
||||||
|
|
||||||
|
print_header "PHASE 1: Publishing Events to be Deleted"
|
||||||
|
|
||||||
|
# Create test events that will be deleted
|
||||||
|
print_step "Creating events for deletion testing..."
|
||||||
|
|
||||||
|
# Create regular events (kind 1) - these will be deleted by ID
|
||||||
|
local event1=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Event to be deleted #1" -k 1 --ts $(($(date +%s) - 100)) -t "type=test" -t "phase=deletion" 2>/dev/null)
|
||||||
|
local event2=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Event to be deleted #2" -k 1 --ts $(($(date +%s) - 90)) -t "type=test" -t "phase=deletion" 2>/dev/null)
|
||||||
|
|
||||||
|
# Publish the events
|
||||||
|
event1_id=$(publish_event "$event1" "Event to be deleted #1")
|
||||||
|
if [[ -z "$event1_id" ]]; then
|
||||||
|
print_error "Failed to publish test event #1"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
event2_id=$(publish_event "$event2" "Event to be deleted #2")
|
||||||
|
if [[ -z "$event2_id" ]]; then
|
||||||
|
print_error "Failed to publish test event #2"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Create an addressable event (kind 30001) - will be deleted by address
|
||||||
|
local addr_event=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Addressable event to be deleted" -k 30001 --ts $(($(date +%s) - 80)) -t "d=test-delete" -t "type=addressable" 2>/dev/null)
|
||||||
|
|
||||||
|
addr_event_id=$(publish_event "$addr_event" "Addressable event to be deleted")
|
||||||
|
if [[ -z "$addr_event_id" ]]; then
|
||||||
|
print_error "Failed to publish addressable test event"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Create an event by a different author (to test unauthorized deletion)
|
||||||
|
local different_key="nsec1234567890abcdef1234567890abcdef1234567890abcdef1234567890ab"
|
||||||
|
local unauth_event=$(nak event --sec "$different_key" -c "Event by different author" -k 1 --ts $(($(date +%s) - 70)) -t "type=unauthorized" 2>/dev/null)
|
||||||
|
|
||||||
|
unauth_event_id=$(publish_event "$unauth_event" "Event by different author")
|
||||||
|
if [[ -z "$unauth_event_id" ]]; then
|
||||||
|
print_warning "Failed to publish unauthorized test event - continuing anyway"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Let events settle
|
||||||
|
sleep 2
|
||||||
|
|
||||||
|
print_header "PHASE 2: Testing Event Deletion by ID"
|
||||||
|
|
||||||
|
print_step "Verifying events exist before deletion..."
|
||||||
|
local event1_before=$(check_event_exists "$event1_id")
|
||||||
|
local event2_before=$(check_event_exists "$event2_id")
|
||||||
|
print_info "Event1 exists: $event1_before, Event2 exists: $event2_before"
|
||||||
|
|
||||||
|
# Create deletion request targeting the two events by ID
|
||||||
|
print_step "Creating deletion request for events by ID..."
|
||||||
|
local deletion_by_id=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Deleting events by ID" -k 5 --ts $(date +%s) -e "$event1_id" -e "$event2_id" -t "k=1" 2>/dev/null)
|
||||||
|
|
||||||
|
deletion_id=$(publish_deletion_request "$deletion_by_id" "Deletion request for events by ID")
|
||||||
|
if [[ -z "$deletion_id" ]]; then
|
||||||
|
print_error "Failed to publish deletion request"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Wait for deletion to process
|
||||||
|
sleep 3
|
||||||
|
|
||||||
|
# Check if events were deleted
|
||||||
|
print_step "Verifying events were deleted..."
|
||||||
|
local event1_after=$(check_event_exists "$event1_id")
|
||||||
|
local event2_after=$(check_event_exists "$event2_id")
|
||||||
|
print_info "Event1 exists after deletion: $event1_after, Event2 exists after deletion: $event2_after"
|
||||||
|
|
||||||
|
if [[ "$event1_after" == "0" && "$event2_after" == "0" ]]; then
|
||||||
|
print_success "✓ Events successfully deleted by ID"
|
||||||
|
else
|
||||||
|
print_error "✗ Events were not properly deleted"
|
||||||
|
fi
|
||||||
|
|
||||||
|
print_header "PHASE 3: Testing Address-based Deletion"
|
||||||
|
|
||||||
|
if [[ -n "$addr_event_id" ]]; then
|
||||||
|
print_step "Verifying addressable event exists before deletion..."
|
||||||
|
local addr_before=$(check_event_exists "$addr_event_id")
|
||||||
|
print_info "Addressable event exists: $addr_before"
|
||||||
|
|
||||||
|
# Create deletion request for addressable event using 'a' tag
|
||||||
|
print_step "Creating deletion request for addressable event..."
|
||||||
|
local test_pubkey=$(echo "$addr_event" | jq -r '.pubkey' 2>/dev/null)
|
||||||
|
local deletion_by_addr=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Deleting addressable event" -k 5 --ts $(date +%s) -t "a=30001:${test_pubkey}:test-delete" -t "k=30001" 2>/dev/null)
|
||||||
|
|
||||||
|
addr_deletion_id=$(publish_deletion_request "$deletion_by_addr" "Deletion request for addressable event")
|
||||||
|
if [[ -n "$addr_deletion_id" ]]; then
|
||||||
|
# Wait for deletion to process
|
||||||
|
sleep 3
|
||||||
|
|
||||||
|
# Check if addressable event was deleted
|
||||||
|
print_step "Verifying addressable event was deleted..."
|
||||||
|
local addr_after=$(check_event_exists "$addr_event_id")
|
||||||
|
print_info "Addressable event exists after deletion: $addr_after"
|
||||||
|
|
||||||
|
if [[ "$addr_after" == "0" ]]; then
|
||||||
|
print_success "✓ Addressable event successfully deleted"
|
||||||
|
else
|
||||||
|
print_error "✗ Addressable event was not properly deleted"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
print_header "PHASE 4: Testing Unauthorized Deletion"
|
||||||
|
|
||||||
|
if [[ -n "$unauth_event_id" ]]; then
|
||||||
|
print_step "Testing unauthorized deletion attempt..."
|
||||||
|
|
||||||
|
# Try to delete the unauthorized event (should fail)
|
||||||
|
local unauth_deletion=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Attempting unauthorized deletion" -k 5 --ts $(date +%s) -e "$unauth_event_id" -t "k=1" 2>/dev/null)
|
||||||
|
|
||||||
|
unauth_deletion_id=$(publish_deletion_request "$unauth_deletion" "Unauthorized deletion request")
|
||||||
|
if [[ -n "$unauth_deletion_id" ]]; then
|
||||||
|
# Wait for processing
|
||||||
|
sleep 3
|
||||||
|
|
||||||
|
# Check if unauthorized event still exists (should still exist)
|
||||||
|
local unauth_after=$(check_event_exists "$unauth_event_id")
|
||||||
|
print_info "Unauthorized event exists after deletion attempt: $unauth_after"
|
||||||
|
|
||||||
|
if [[ "$unauth_after" == "1" ]]; then
|
||||||
|
print_success "✓ Unauthorized deletion properly rejected - event still exists"
|
||||||
|
else
|
||||||
|
print_error "✗ Unauthorized deletion succeeded - security vulnerability!"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
print_header "PHASE 5: Testing Invalid Deletion Requests"
|
||||||
|
|
||||||
|
print_step "Testing deletion request with no targets..."
|
||||||
|
|
||||||
|
# Create deletion request with no 'e' or 'a' tags (should be rejected)
|
||||||
|
local invalid_deletion='{"id":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef","pubkey":"aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4","created_at":'$(date +%s)',"kind":5,"tags":[["k","1"]],"content":"Invalid deletion request with no targets","sig":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"}'
|
||||||
|
|
||||||
|
# Create EVENT message in Nostr format
|
||||||
|
local invalid_message="[\"EVENT\",$invalid_deletion]"
|
||||||
|
|
||||||
|
# Publish to relay
|
||||||
|
local invalid_response=""
|
||||||
|
if command -v websocat &> /dev/null; then
|
||||||
|
invalid_response=$(echo "$invalid_message" | timeout 5s websocat "$RELAY_URL" 2>&1 || echo "Connection failed")
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check response - should be rejected
|
||||||
|
if [[ "$invalid_response" == *"false"* ]]; then
|
||||||
|
print_success "✓ Invalid deletion request properly rejected"
|
||||||
|
elif [[ "$invalid_response" == *"true"* ]]; then
|
||||||
|
print_warning "⚠ Invalid deletion request was accepted (should have been rejected)"
|
||||||
|
else
|
||||||
|
print_info "Invalid deletion request response: $invalid_response"
|
||||||
|
fi
|
||||||
|
|
||||||
|
print_header "PHASE 6: Verification"
|
||||||
|
|
||||||
|
# Verify deletion requests themselves are stored
|
||||||
|
print_step "Verifying deletion requests are stored..."
|
||||||
|
local deletion_count=$(query_events_by_kind 5)
|
||||||
|
print_info "Deletion requests accessible via query: $deletion_count"
|
||||||
|
|
||||||
|
if [[ "$deletion_count" -gt 0 ]]; then
|
||||||
|
print_success "✓ Deletion requests properly stored and queryable"
|
||||||
|
else
|
||||||
|
print_warning "⚠ No deletion requests found via query"
|
||||||
|
fi
|
||||||
|
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
# Run the test
|
||||||
|
print_header "Starting NIP-09 Event Deletion Request Test Suite"
|
||||||
|
echo
|
||||||
|
|
||||||
|
if run_deletion_test; then
|
||||||
|
echo
|
||||||
|
print_success "All NIP-09 deletion tests completed successfully!"
|
||||||
|
print_info "The C-Relay NIP-09 implementation is working correctly"
|
||||||
|
print_info "✅ Event deletion by ID working"
|
||||||
|
print_info "✅ Address-based deletion working"
|
||||||
|
print_info "✅ Authorization validation working"
|
||||||
|
print_info "✅ Invalid deletion rejection working"
|
||||||
|
echo
|
||||||
|
exit 0
|
||||||
|
else
|
||||||
|
echo
|
||||||
|
print_error "Some NIP-09 tests failed"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
199
tests/subscribe_all.sh
Executable file
199
tests/subscribe_all.sh
Executable file
@@ -0,0 +1,199 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Persistent Subscription Test Script
|
||||||
|
# Subscribes to all events in the relay and prints them as they arrive in real-time
|
||||||
|
# This tests the persistent subscription functionality of the C-Relay
|
||||||
|
|
||||||
|
set -e # Exit on any error
|
||||||
|
|
||||||
|
# Color constants
|
||||||
|
RED='\033[31m'
|
||||||
|
GREEN='\033[32m'
|
||||||
|
YELLOW='\033[33m'
|
||||||
|
BLUE='\033[34m'
|
||||||
|
BOLD='\033[1m'
|
||||||
|
RESET='\033[0m'
|
||||||
|
|
||||||
|
# Test configuration
|
||||||
|
RELAY_URL="ws://127.0.0.1:8888"
|
||||||
|
SUBSCRIPTION_ID="persistent_test_$(date +%s)"
|
||||||
|
|
||||||
|
# Print functions
|
||||||
|
print_header() {
|
||||||
|
echo -e "${BLUE}${BOLD}=== $1 ===${RESET}"
|
||||||
|
}
|
||||||
|
|
||||||
|
print_info() {
|
||||||
|
echo -e "${BLUE}[INFO]${RESET} $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
print_success() {
|
||||||
|
echo -e "${GREEN}✓${RESET} $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
print_error() {
|
||||||
|
echo -e "${RED}✗${RESET} $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
print_warning() {
|
||||||
|
echo -e "${YELLOW}[WARNING]${RESET} $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
print_event() {
|
||||||
|
echo -e "${GREEN}[EVENT]${RESET} $1"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Cleanup function
|
||||||
|
cleanup() {
|
||||||
|
print_info "Cleaning up..."
|
||||||
|
if [[ -n "$WEBSOCAT_PID" ]]; then
|
||||||
|
kill "$WEBSOCAT_PID" 2>/dev/null || true
|
||||||
|
wait "$WEBSOCAT_PID" 2>/dev/null || true
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Send CLOSE message to clean up subscription on relay
|
||||||
|
if command -v websocat &> /dev/null; then
|
||||||
|
echo "[\"CLOSE\",\"$SUBSCRIPTION_ID\"]" | timeout 2s websocat "$RELAY_URL" 2>/dev/null || true
|
||||||
|
fi
|
||||||
|
|
||||||
|
print_info "Cleanup complete"
|
||||||
|
exit 0
|
||||||
|
}
|
||||||
|
|
||||||
|
# Set up signal handlers
|
||||||
|
trap cleanup SIGINT SIGTERM
|
||||||
|
|
||||||
|
# Parse events from relay responses
|
||||||
|
parse_events() {
|
||||||
|
while IFS= read -r line; do
|
||||||
|
# Check if this is an EVENT message
|
||||||
|
if echo "$line" | jq -e '. | type == "array" and length >= 3 and .[0] == "EVENT"' >/dev/null 2>&1; then
|
||||||
|
# Extract event details
|
||||||
|
local event_id=$(echo "$line" | jq -r '.[2].id' 2>/dev/null || echo "unknown")
|
||||||
|
local event_kind=$(echo "$line" | jq -r '.[2].kind' 2>/dev/null || echo "unknown")
|
||||||
|
local event_content=$(echo "$line" | jq -r '.[2].content' 2>/dev/null || echo "")
|
||||||
|
local event_pubkey=$(echo "$line" | jq -r '.[2].pubkey' 2>/dev/null || echo "unknown")
|
||||||
|
local event_created_at=$(echo "$line" | jq -r '.[2].created_at' 2>/dev/null || echo "unknown")
|
||||||
|
local event_tags=$(echo "$line" | jq -r '.[2].tags | length' 2>/dev/null || echo "0")
|
||||||
|
|
||||||
|
# Convert timestamp to readable format
|
||||||
|
local readable_time="unknown"
|
||||||
|
if [[ "$event_created_at" != "unknown" && "$event_created_at" =~ ^[0-9]+$ ]]; then
|
||||||
|
readable_time=$(date -d "@$event_created_at" "+%Y-%m-%d %H:%M:%S" 2>/dev/null || echo "$event_created_at")
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Print formatted event
|
||||||
|
print_event "Kind: $event_kind | ID: ${event_id:0:16}... | Author: ${event_pubkey:0:16}..."
|
||||||
|
echo -e " ${YELLOW}Time:${RESET} $readable_time | ${YELLOW}Tags:${RESET} $event_tags"
|
||||||
|
|
||||||
|
# Show content (truncated if too long)
|
||||||
|
if [[ -n "$event_content" ]]; then
|
||||||
|
local truncated_content="${event_content:0:100}"
|
||||||
|
if [[ ${#event_content} -gt 100 ]]; then
|
||||||
|
truncated_content="${truncated_content}..."
|
||||||
|
fi
|
||||||
|
echo -e " ${YELLOW}Content:${RESET} $truncated_content"
|
||||||
|
fi
|
||||||
|
echo # Blank line for readability
|
||||||
|
|
||||||
|
elif echo "$line" | jq -e '. | type == "array" and length >= 2 and .[0] == "EOSE"' >/dev/null 2>&1; then
|
||||||
|
# End of stored events
|
||||||
|
local sub_id=$(echo "$line" | jq -r '.[1]' 2>/dev/null)
|
||||||
|
print_info "End of stored events for subscription: $sub_id"
|
||||||
|
print_success "Persistent subscription is now active - waiting for new events..."
|
||||||
|
echo
|
||||||
|
|
||||||
|
elif echo "$line" | jq -e '. | type == "array" and length >= 3 and .[0] == "CLOSED"' >/dev/null 2>&1; then
|
||||||
|
# Subscription closed
|
||||||
|
local sub_id=$(echo "$line" | jq -r '.[1]' 2>/dev/null)
|
||||||
|
local reason=$(echo "$line" | jq -r '.[2]' 2>/dev/null)
|
||||||
|
print_warning "Subscription $sub_id was closed: $reason"
|
||||||
|
|
||||||
|
elif echo "$line" | jq -e '. | type == "array" and length >= 4 and .[0] == "OK"' >/dev/null 2>&1; then
|
||||||
|
# OK response to event publishing
|
||||||
|
local event_id=$(echo "$line" | jq -r '.[1]' 2>/dev/null)
|
||||||
|
local success=$(echo "$line" | jq -r '.[2]' 2>/dev/null)
|
||||||
|
local message=$(echo "$line" | jq -r '.[3]' 2>/dev/null)
|
||||||
|
if [[ "$success" == "true" ]]; then
|
||||||
|
print_success "Event published: ${event_id:0:16}..."
|
||||||
|
else
|
||||||
|
print_error "Event publish failed: ${event_id:0:16}... - $message"
|
||||||
|
fi
|
||||||
|
|
||||||
|
else
|
||||||
|
# Unknown message type - just show it
|
||||||
|
print_info "Relay message: $line"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
|
# Main function
|
||||||
|
main() {
|
||||||
|
print_header "Persistent Subscription Test - Subscribe to All Events"
|
||||||
|
|
||||||
|
# Check dependencies
|
||||||
|
if ! command -v websocat &> /dev/null; then
|
||||||
|
print_error "websocat command not found"
|
||||||
|
print_info "Please install websocat for testing"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
if ! command -v jq &> /dev/null; then
|
||||||
|
print_error "jq command not found"
|
||||||
|
print_info "Please install jq for JSON processing"
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
print_info "Subscription ID: $SUBSCRIPTION_ID"
|
||||||
|
print_info "Relay URL: $RELAY_URL"
|
||||||
|
print_info "Filter: {} (all events)"
|
||||||
|
echo
|
||||||
|
|
||||||
|
# Create REQ message to subscribe to all events
|
||||||
|
local req_message="[\"REQ\",\"$SUBSCRIPTION_ID\",{}]"
|
||||||
|
|
||||||
|
print_info "Establishing persistent subscription..."
|
||||||
|
print_info "Press Ctrl+C to stop and cleanup"
|
||||||
|
echo
|
||||||
|
|
||||||
|
# Start websocat connection and keep it open
|
||||||
|
{
|
||||||
|
echo "$req_message"
|
||||||
|
# Keep the connection alive by sleeping indefinitely
|
||||||
|
# The connection will receive events as they come in
|
||||||
|
while true; do
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
} | websocat "$RELAY_URL" | parse_events &
|
||||||
|
|
||||||
|
# Store the background process ID
|
||||||
|
WEBSOCAT_PID=$!
|
||||||
|
|
||||||
|
# Wait for the background process (which runs indefinitely)
|
||||||
|
# This will exit when we get a signal (Ctrl+C)
|
||||||
|
wait "$WEBSOCAT_PID" 2>/dev/null || true
|
||||||
|
}
|
||||||
|
|
||||||
|
# Usage information
|
||||||
|
usage() {
|
||||||
|
echo "Usage: $0"
|
||||||
|
echo
|
||||||
|
echo "This script creates a persistent subscription to all events on the relay"
|
||||||
|
echo "and displays them in real-time as they arrive. Perfect for testing"
|
||||||
|
echo "the persistent subscription functionality."
|
||||||
|
echo
|
||||||
|
echo "To test:"
|
||||||
|
echo "1. Run this script in one terminal"
|
||||||
|
echo "2. Run 'tests/1_nip_test.sh' in another terminal"
|
||||||
|
echo "3. Watch events appear in real-time in this terminal"
|
||||||
|
echo
|
||||||
|
echo "Press Ctrl+C to stop and cleanup the subscription."
|
||||||
|
}
|
||||||
|
|
||||||
|
# Handle help flag
|
||||||
|
if [[ "$1" == "-h" || "$1" == "--help" ]]; then
|
||||||
|
usage
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Run main function
|
||||||
|
main "$@"
|
||||||
Reference in New Issue
Block a user