Compare commits

7 Commits

| Author | SHA1 | Date |
|---|---|---|
| | e96957f91b | |
| | de3e7c75a5 | |
| | 646adac981 | |
| | 2d93c2f819 | |
| | ce7f7ad11b | |
| | 662feab881 | |
| | 227c579147 | |

.gitignore (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
nostr_core_lib/
nips/
build/

.gitmodules (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
[submodule "nostr_core_lib"]
	path = nostr_core_lib
	url = https://git.laantungir.net/laantungir/nostr_core_lib.git

Makefile (new file, 162 lines)
@@ -0,0 +1,162 @@
# C-Relay Makefile

CC = gcc
CFLAGS = -Wall -Wextra -std=c99 -g -O2
INCLUDES = -I. -Inostr_core_lib -Inostr_core_lib/nostr_core -Inostr_core_lib/cjson -Inostr_core_lib/nostr_websocket
LIBS = -lsqlite3 -lwebsockets -lz -ldl -lpthread -lm -L/usr/local/lib -lsecp256k1 -lssl -lcrypto -L/usr/local/lib -lcurl

# Build directory
BUILD_DIR = build

# Source files
MAIN_SRC = src/main.c
NOSTR_CORE_LIB = nostr_core_lib/libnostr_core_x64.a

# Architecture detection
ARCH = $(shell uname -m)
ifeq ($(ARCH),x86_64)
    TARGET = $(BUILD_DIR)/c_relay_x86
else ifeq ($(ARCH),aarch64)
    TARGET = $(BUILD_DIR)/c_relay_arm64
else ifeq ($(ARCH),arm64)
    TARGET = $(BUILD_DIR)/c_relay_arm64
else
    TARGET = $(BUILD_DIR)/c_relay_$(ARCH)
endif

# Default target
all: $(TARGET)

# Create build directory
$(BUILD_DIR):
	mkdir -p $(BUILD_DIR)

# Check if nostr_core_lib is built
$(NOSTR_CORE_LIB):
	@echo "Building nostr_core_lib..."
	cd nostr_core_lib && ./build.sh

# Build the relay
$(TARGET): $(BUILD_DIR) $(MAIN_SRC) $(NOSTR_CORE_LIB)
	@echo "Compiling C-Relay for architecture: $(ARCH)"
	$(CC) $(CFLAGS) $(INCLUDES) $(MAIN_SRC) -o $(TARGET) $(NOSTR_CORE_LIB) $(LIBS)
	@echo "Build complete: $(TARGET)"

# Build for specific architectures
x86: $(BUILD_DIR) $(MAIN_SRC) $(NOSTR_CORE_LIB)
	@echo "Building C-Relay for x86_64..."
	$(CC) $(CFLAGS) $(INCLUDES) $(MAIN_SRC) -o $(BUILD_DIR)/c_relay_x86 $(NOSTR_CORE_LIB) $(LIBS)
	@echo "Build complete: $(BUILD_DIR)/c_relay_x86"

arm64: $(BUILD_DIR) $(MAIN_SRC) $(NOSTR_CORE_LIB)
	@echo "Cross-compiling C-Relay for ARM64..."
	@if ! command -v aarch64-linux-gnu-gcc >/dev/null 2>&1; then \
		echo "ERROR: ARM64 cross-compiler not found."; \
		echo "Install with: make install-cross-tools"; \
		echo "Or install manually: sudo apt install gcc-aarch64-linux-gnu"; \
		exit 1; \
	fi
	@echo "Checking for ARM64 development libraries..."
	@if ! dpkg -l | grep -q "libssl-dev:arm64\|libsqlite3-dev:arm64"; then \
		echo "ERROR: ARM64 libraries not found. Cross-compilation requires ARM64 versions of:"; \
		echo "  - libssl-dev:arm64"; \
		echo "  - libsqlite3-dev:arm64"; \
		echo "  - libwebsockets-dev:arm64"; \
		echo "  - libsecp256k1-dev:arm64"; \
		echo "  - zlib1g-dev:arm64"; \
		echo "  - libcurl4-openssl-dev:arm64"; \
		echo ""; \
		echo "Install ARM64 libraries with: make install-arm64-deps"; \
		echo "Or use Docker for cross-platform builds."; \
		exit 1; \
	fi
	@echo "Using aarch64-linux-gnu-gcc with ARM64 libraries..."
	PKG_CONFIG_PATH=/usr/lib/aarch64-linux-gnu/pkgconfig:/usr/share/pkgconfig \
	aarch64-linux-gnu-gcc $(CFLAGS) $(INCLUDES) $(MAIN_SRC) -o $(BUILD_DIR)/c_relay_arm64 $(NOSTR_CORE_LIB) \
	-L/usr/lib/aarch64-linux-gnu $(LIBS)
	@echo "Build complete: $(BUILD_DIR)/c_relay_arm64"

# Install ARM64 cross-compilation dependencies
install-arm64-deps:
	@echo "Installing ARM64 cross-compilation dependencies..."
	@echo "This requires adding ARM64 architecture and installing cross-libraries..."
	sudo dpkg --add-architecture arm64
	sudo apt update
	sudo apt install -y \
		gcc-aarch64-linux-gnu \
		libc6-dev-arm64-cross \
		libssl-dev:arm64 \
		libsqlite3-dev:arm64 \
		zlib1g-dev:arm64 \
		libcurl4-openssl-dev:arm64
	@echo "Note: libwebsockets-dev:arm64 and libsecp256k1-dev:arm64 may need manual building"

# Install cross-compilation tools
install-cross-tools:
	@echo "Installing cross-compilation tools..."
	sudo apt update
	sudo apt install -y gcc-aarch64-linux-gnu libc6-dev-arm64-cross

# Check what architectures we can actually build
check-toolchain:
	@echo "Checking available toolchains:"
	@echo "Native compiler: $(shell $(CC) --version | head -1)"
	@if command -v aarch64-linux-gnu-gcc >/dev/null 2>&1; then \
		echo "ARM64 cross-compiler: $(shell aarch64-linux-gnu-gcc --version | head -1)"; \
	else \
		echo "ARM64 cross-compiler: NOT INSTALLED (install with 'make install-cross-tools')"; \
	fi

# Run tests
test: $(TARGET)
	@echo "Running tests..."
	./tests/1_nip_test.sh

# Initialize database
init-db:
	@echo "Initializing database..."
	./db/init.sh --force

# Clean build artifacts
clean:
	rm -rf $(BUILD_DIR)
	@echo "Clean complete"

# Clean everything including nostr_core_lib
clean-all: clean
	cd nostr_core_lib && make clean 2>/dev/null || true

# Install dependencies (Ubuntu/Debian)
install-deps:
	@echo "Installing dependencies..."
	sudo apt update
	sudo apt install -y build-essential libsqlite3-dev libssl-dev libcurl4-openssl-dev libsecp256k1-dev zlib1g-dev jq curl

# Help
help:
	@echo "C-Relay Build System"
	@echo ""
	@echo "Targets:"
	@echo "  all                  Build the relay for current architecture (default)"
	@echo "  x86                  Build specifically for x86_64"
	@echo "  arm64                Build for ARM64 (requires cross-compilation setup)"
	@echo "  test                 Build and run tests"
	@echo "  init-db              Initialize the database"
	@echo "  clean                Clean build artifacts"
	@echo "  clean-all            Clean everything including dependencies"
	@echo "  install-deps         Install system dependencies"
	@echo "  install-cross-tools  Install basic ARM64 cross-compiler"
	@echo "  install-arm64-deps   Install ARM64 cross-compilation libraries"
	@echo "  check-toolchain      Check available compilers"
	@echo "  help                 Show this help"
	@echo ""
	@echo "Usage:"
	@echo "  make                      # Build the relay for current arch"
	@echo "  make x86                  # Build for x86_64"
	@echo "  make arm64                # Build for ARM64 (fails if cross-compilation not set up)"
	@echo "  make install-arm64-deps   # Install full ARM64 cross-compilation setup"
	@echo "  make check-toolchain      # Check what compilers are available"
	@echo "  make test                 # Run tests"
	@echo "  make init-db              # Set up database"

.PHONY: all x86 arm64 test init-db clean clean-all install-deps install-cross-tools install-arm64-deps check-toolchain help

README.md (modified, 29 lines)
@@ -1,2 +1,29 @@
A nostr relay in C.
A nostr relay in C with sqlite on the back end.

<!--
NOTE FOR ASSISTANTS: When updating the NIPs checklist below, ONLY change [ ] to [x] to mark as complete.
Do NOT modify the formatting, add emojis, or change the text. Keep the simple format consistent.
-->

### [NIPs](https://github.com/nostr-protocol/nips)

- [x] NIP-01: Basic protocol flow implementation
- [x] NIP-09: Event deletion
- [ ] NIP-11: Relay information document
- [ ] NIP-12: Generic tag queries
- [ ] NIP-13: Proof of Work
- [x] NIP-15: End of Stored Events Notice
- [ ] NIP-16: Event Treatment
- [x] NIP-20: Command Results
- [ ] NIP-22: Event `created_at` Limits
- [ ] NIP-25: Reactions
- [ ] NIP-26: Delegated Event Signing
- [ ] NIP-28: Public Chat
- [ ] NIP-33: Parameterized Replaceable Events
- [ ] NIP-40: Expiration Timestamp
- [ ] NIP-42: Authentication of clients to relays
- [ ] NIP-45: Counting results. [experimental](#count)
- [ ] NIP-50: Keywords filter. [experimental](#search)
- [ ] NIP-70: Protected Events

build_and_push.sh (new executable file, 391 lines)
@@ -0,0 +1,391 @@
#!/bin/bash
set -e

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

print_status() { echo -e "${BLUE}[INFO]${NC} $1"; }
print_success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; }
print_warning() { echo -e "${YELLOW}[WARNING]${NC} $1"; }
print_error() { echo -e "${RED}[ERROR]${NC} $1"; }

# Global variables
COMMIT_MESSAGE=""
RELEASE_MODE=false

# Usage/help text (defined before argument parsing so -h/--help can call it)
show_usage() {
    echo "C-Relay Build and Push Script"
    echo ""
    echo "Usage:"
    echo "  $0 \"commit message\"     - Default: compile, increment patch, commit & push"
    echo "  $0 -r \"commit message\"  - Release: compile x86+arm64, increment minor, create release"
    echo ""
    echo "Examples:"
    echo "  $0 \"Fixed event validation bug\""
    echo "  $0 --release \"Major release with new features\""
    echo ""
    echo "Default Mode (patch increment):"
    echo "  - Compile C-Relay"
    echo "  - Increment patch version (v1.2.3 → v1.2.4)"
    echo "  - Git add, commit with message, and push"
    echo ""
    echo "Release Mode (-r flag):"
    echo "  - Compile C-Relay for x86_64 and arm64"
    echo "  - Increment minor version, zero patch (v1.2.3 → v1.3.0)"
    echo "  - Git add, commit, push, and create Gitea release"
    echo ""
    echo "Requirements for Release Mode:"
    echo "  - For ARM64 builds: make install-arm64-deps (optional - will build x86_64 only if missing)"
    echo "  - Gitea token in ~/.gitea_token for release uploads"
}

# Parse command line arguments
while [[ $# -gt 0 ]]; do
    case $1 in
        -r|--release)
            RELEASE_MODE=true
            shift
            ;;
        -h|--help)
            show_usage
            exit 0
            ;;
        *)
            # First non-flag argument is the commit message
            if [[ -z "$COMMIT_MESSAGE" ]]; then
                COMMIT_MESSAGE="$1"
            fi
            shift
            ;;
    esac
done

# Validate inputs
if [[ -z "$COMMIT_MESSAGE" ]]; then
    print_error "Commit message is required"
    echo ""
    show_usage
    exit 1
fi

# Check if we're in a git repository
check_git_repo() {
    if ! git rev-parse --git-dir > /dev/null 2>&1; then
        print_error "Not in a git repository"
        exit 1
    fi
}

# Function to get current version and increment appropriately
increment_version() {
    local increment_type="$1"  # "patch" or "minor"

    print_status "Getting current version..."

    # Get the highest version tag (not chronologically latest)
    LATEST_TAG=$(git tag -l 'v*.*.*' | sort -V | tail -n 1 || echo "")
    if [[ -z "$LATEST_TAG" ]]; then
        LATEST_TAG="v0.0.0"
        print_warning "No version tags found, starting from $LATEST_TAG"
    fi

    # Extract version components (remove 'v' prefix)
    VERSION=${LATEST_TAG#v}

    # Parse major.minor.patch using regex
    if [[ $VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
        MAJOR=${BASH_REMATCH[1]}
        MINOR=${BASH_REMATCH[2]}
        PATCH=${BASH_REMATCH[3]}
    else
        print_error "Invalid version format in tag: $LATEST_TAG"
        print_error "Expected format: v0.1.0"
        exit 1
    fi

    # Increment version based on type
    if [[ "$increment_type" == "minor" ]]; then
        # Minor release: increment minor, zero patch
        NEW_MINOR=$((MINOR + 1))
        NEW_PATCH=0
        NEW_VERSION="v${MAJOR}.${NEW_MINOR}.${NEW_PATCH}"
        print_status "Release mode: incrementing minor version"
    else
        # Default: increment patch
        NEW_PATCH=$((PATCH + 1))
        NEW_VERSION="v${MAJOR}.${MINOR}.${NEW_PATCH}"
        print_status "Default mode: incrementing patch version"
    fi

    print_status "Current version: $LATEST_TAG"
    print_status "New version: $NEW_VERSION"

    # Export for use in other functions
    export NEW_VERSION
}

# Function to compile the C-Relay project
compile_project() {
    print_status "Compiling C-Relay..."

    # Clean previous build
    if make clean > /dev/null 2>&1; then
        print_success "Cleaned previous build"
    else
        print_warning "Clean failed or no Makefile found"
    fi

    # Compile the project
    if make > /dev/null 2>&1; then
        print_success "C-Relay compiled successfully"
    else
        print_error "Compilation failed"
        exit 1
    fi
}

# Function to build release binaries
build_release_binaries() {
    print_status "Building release binaries..."

    # Build x86_64 version
    print_status "Building x86_64 version..."
    make clean > /dev/null 2>&1
    if make x86 > /dev/null 2>&1; then
        if [[ -f "build/c_relay_x86" ]]; then
            cp build/c_relay_x86 c-relay-x86_64
            print_success "x86_64 binary created: c-relay-x86_64"
        else
            print_error "x86_64 binary not found after compilation"
            exit 1
        fi
    else
        print_error "x86_64 build failed"
        exit 1
    fi

    # Try to build ARM64 version
    print_status "Attempting ARM64 build..."
    make clean > /dev/null 2>&1
    if make arm64 > /dev/null 2>&1; then
        if [[ -f "build/c_relay_arm64" ]]; then
            cp build/c_relay_arm64 c-relay-arm64
            print_success "ARM64 binary created: c-relay-arm64"
        else
            print_warning "ARM64 binary not found after compilation"
        fi
    else
        print_warning "ARM64 build failed - ARM64 cross-compilation not properly set up"
        print_status "Only x86_64 binary will be included in release"
    fi

    # Restore normal build
    make clean > /dev/null 2>&1
    make > /dev/null 2>&1
}

# Function to commit and push changes
git_commit_and_push() {
    print_status "Preparing git commit..."

    # Stage all changes
    if git add . > /dev/null 2>&1; then
        print_success "Staged all changes"
    else
        print_error "Failed to stage changes"
        exit 1
    fi

    # Check if there are changes to commit
    if git diff --staged --quiet; then
        print_warning "No changes to commit"
    else
        # Commit changes
        if git commit -m "$NEW_VERSION - $COMMIT_MESSAGE" > /dev/null 2>&1; then
            print_success "Committed changes"
        else
            print_error "Failed to commit changes"
            exit 1
        fi
    fi

    # Create new git tag
    if git tag "$NEW_VERSION" > /dev/null 2>&1; then
        print_success "Created tag: $NEW_VERSION"
    else
        print_warning "Tag $NEW_VERSION already exists"
    fi

    # Push changes and tags
    print_status "Pushing to remote repository..."
    if git push > /dev/null 2>&1; then
        print_success "Pushed changes"
    else
        print_error "Failed to push changes"
        exit 1
    fi

    if git push --tags > /dev/null 2>&1; then
        print_success "Pushed tags"
    else
        print_warning "Failed to push tags"
    fi
}

# Function to create Gitea release
create_gitea_release() {
    print_status "Creating Gitea release..."

    # Check for Gitea token
    if [[ ! -f "$HOME/.gitea_token" ]]; then
        print_warning "No ~/.gitea_token found. Skipping release creation."
        print_warning "Create ~/.gitea_token with your Gitea access token to enable releases."
        return 0
    fi

    local token=$(cat "$HOME/.gitea_token" | tr -d '\n\r')
    local api_url="https://git.laantungir.net/api/v1/repos/laantungir/c-relay"

    # Create release
    print_status "Creating release $NEW_VERSION..."
    local response=$(curl -s -X POST "$api_url/releases" \
        -H "Authorization: token $token" \
        -H "Content-Type: application/json" \
        -d "{\"tag_name\": \"$NEW_VERSION\", \"name\": \"$NEW_VERSION\", \"body\": \"$COMMIT_MESSAGE\"}")

    if echo "$response" | grep -q '"id"'; then
        print_success "Created release $NEW_VERSION"
        upload_release_binaries "$api_url" "$token"
    elif echo "$response" | grep -q "already exists"; then
        print_warning "Release $NEW_VERSION already exists"
        upload_release_binaries "$api_url" "$token"
    else
        print_error "Failed to create release $NEW_VERSION"
        print_error "Response: $response"

        # Try to check if the release exists anyway
        print_status "Checking if release exists..."
        local check_response=$(curl -s -H "Authorization: token $token" "$api_url/releases/tags/$NEW_VERSION")
        if echo "$check_response" | grep -q '"id"'; then
            print_warning "Release exists but creation response was unexpected"
            upload_release_binaries "$api_url" "$token"
        else
            print_error "Release does not exist and creation failed"
            return 1
        fi
    fi
}

# Function to upload release binaries
upload_release_binaries() {
    local api_url="$1"
    local token="$2"

    # Get release ID with more robust parsing
    print_status "Getting release ID for $NEW_VERSION..."
    local response=$(curl -s -H "Authorization: token $token" "$api_url/releases/tags/$NEW_VERSION")
    local release_id=$(echo "$response" | grep -o '"id":[0-9]*' | head -n1 | cut -d: -f2)

    if [[ -z "$release_id" ]]; then
        print_error "Could not get release ID for $NEW_VERSION"
        print_error "API Response: $response"

        # Try to list all releases to debug
        print_status "Available releases:"
        curl -s -H "Authorization: token $token" "$api_url/releases" | grep -o '"tag_name":"[^"]*"' | head -5
        return 1
    fi

    print_success "Found release ID: $release_id"

    # Upload x86_64 binary
    if [[ -f "c-relay-x86_64" ]]; then
        print_status "Uploading x86_64 binary..."
        if curl -s -X POST "$api_url/releases/$release_id/assets" \
            -H "Authorization: token $token" \
            -F "attachment=@c-relay-x86_64;filename=c-relay-${NEW_VERSION}-linux-x86_64" > /dev/null; then
            print_success "Uploaded x86_64 binary"
        else
            print_warning "Failed to upload x86_64 binary"
        fi
    fi

    # Upload ARM64 binary
    if [[ -f "c-relay-arm64" ]]; then
        print_status "Uploading ARM64 binary..."
        if curl -s -X POST "$api_url/releases/$release_id/assets" \
            -H "Authorization: token $token" \
            -F "attachment=@c-relay-arm64;filename=c-relay-${NEW_VERSION}-linux-arm64" > /dev/null; then
            print_success "Uploaded ARM64 binary"
        else
            print_warning "Failed to upload ARM64 binary"
        fi
    fi
}

# Function to clean up release binaries
cleanup_release_binaries() {
    if [[ -f "c-relay-x86_64" ]]; then
        rm -f c-relay-x86_64
        print_status "Cleaned up x86_64 binary"
    fi
    if [[ -f "c-relay-arm64" ]]; then
        rm -f c-relay-arm64
        print_status "Cleaned up ARM64 binary"
    fi
}

# Main execution
main() {
    print_status "C-Relay Build and Push Script"

    # Check prerequisites
    check_git_repo

    if [[ "$RELEASE_MODE" == true ]]; then
        print_status "=== RELEASE MODE ==="

        # Increment minor version for releases
        increment_version "minor"

        # Compile project first
        compile_project

        # Build release binaries
        build_release_binaries

        # Commit and push
        git_commit_and_push

        # Create Gitea release with binaries
        create_gitea_release

        # Cleanup
        cleanup_release_binaries

        print_success "Release $NEW_VERSION completed successfully!"
        print_status "Binaries uploaded to Gitea release"

    else
        print_status "=== DEFAULT MODE ==="

        # Increment patch version for regular commits
        increment_version "patch"

        # Compile project
        compile_project

        # Commit and push
        git_commit_and_push

        print_success "Build and push completed successfully!"
        print_status "Version $NEW_VERSION pushed to repository"
    fi
}

# Execute main function
main

c-relay-x86_64 (new executable file, BIN): binary file not shown

db/README.md (new file, 228 lines)
@@ -0,0 +1,228 @@
# C Nostr Relay Database

This directory contains the SQLite database schema and initialization scripts for the C Nostr Relay implementation.

## Files

- **`schema.sql`** - Complete database schema based on nostr-rs-relay v18
- **`init.sh`** - Database initialization script
- **`c_nostr_relay.db`** - SQLite database file (created after running init.sh)

## Quick Start

1. **Initialize the database:**
   ```bash
   cd db
   ./init.sh
   ```

2. **Force reinitialize (removes existing database):**
   ```bash
   ./init.sh --force
   ```

3. **Initialize with optimization and info:**
   ```bash
   ./init.sh --info --optimize
   ```

## Database Schema

The schema is fully compatible with the Nostr protocol and includes:

### Core Tables

- **`event`** - Main event storage with all Nostr event data
- **`tag`** - Denormalized tag index for efficient queries
- **`user_verification`** - NIP-05 verification tracking
- **`account`** - User account management (optional)
- **`invoice`** - Lightning payment tracking (optional)

### Key Features

- ✅ **NIP-01 compliant** - Full basic protocol support
- ✅ **Replaceable events** - Supports kinds 0, 3, 10000-19999
- ✅ **Parameterized replaceable** - Supports kinds 30000-39999 with `d` tags
- ✅ **Event deletion** - NIP-09 soft deletion with `hidden` column
- ✅ **Event expiration** - NIP-40 automatic cleanup
- ✅ **Authentication** - NIP-42 client authentication
- ✅ **NIP-05 verification** - Domain-based identity verification
- ✅ **Performance optimized** - Comprehensive indexing strategy

### Schema Version

Current version: **v18** (compatible with nostr-rs-relay v18)

## Database Structure

### Event Storage
```sql
CREATE TABLE event (
    id INTEGER PRIMARY KEY,
    event_hash BLOB NOT NULL,      -- 32-byte SHA256 hash
    first_seen INTEGER NOT NULL,   -- relay receive timestamp
    created_at INTEGER NOT NULL,   -- event creation timestamp
    expires_at INTEGER,            -- NIP-40 expiration
    author BLOB NOT NULL,          -- 32-byte pubkey
    delegated_by BLOB,             -- NIP-26 delegator
    kind INTEGER NOT NULL,         -- event kind
    hidden INTEGER DEFAULT FALSE,  -- soft deletion flag
    content TEXT NOT NULL          -- complete JSON event
);
```

### Tag Indexing
```sql
CREATE TABLE tag (
    id INTEGER PRIMARY KEY,
    event_id INTEGER NOT NULL,
    name TEXT,                     -- tag name ("e", "p", etc.)
    value TEXT,                    -- tag value
    created_at INTEGER NOT NULL,   -- denormalized for performance
    kind INTEGER NOT NULL          -- denormalized for performance
);
```

## Performance Features

### Optimized Indexes
- **Hash-based lookups** - `event_hash_index` for O(1) event retrieval
- **Author queries** - `author_index`, `author_created_at_index`
- **Kind filtering** - `kind_index`, `kind_created_at_index`
- **Tag searching** - `tag_covering_index` for efficient tag queries
- **Composite queries** - Multi-column indexes for complex filters

### Query Optimization
- **Denormalized tags** - Includes `kind` and `created_at` in tag table
- **Binary storage** - BLOBs for hex data (pubkeys, hashes)
- **WAL mode** - Write-Ahead Logging for concurrent access
- **Automatic cleanup** - Triggers for data integrity
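
WAL mode is persistent once set on a database file; a minimal check to confirm it is active (run from the `sqlite3` shell against the relay database, shown here as a sketch rather than a required step) is:

```sql
-- Returns 'wal' when Write-Ahead Logging is active for this database file.
PRAGMA journal_mode;
```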

## Usage Examples

### Basic Operations

1. **Insert an event:**
   ```sql
   INSERT INTO event (event_hash, first_seen, created_at, author, kind, content)
   VALUES (?, ?, ?, ?, ?, ?);
   ```

2. **Query by author:**
   ```sql
   SELECT content FROM event
   WHERE author = ? AND hidden != TRUE
   ORDER BY created_at DESC;
   ```

3. **Filter by tags:**
   ```sql
   SELECT e.content FROM event e
   JOIN tag t ON e.id = t.event_id
   WHERE t.name = 'p' AND t.value = ? AND e.hidden != TRUE;
   ```

### Advanced Queries

1. **Get replaceable event (latest only):**
   ```sql
   SELECT content FROM event
   WHERE author = ? AND kind = ? AND hidden != TRUE
   ORDER BY created_at DESC LIMIT 1;
   ```

2. **Tag-based filtering (NIP-01 filters):**
   ```sql
   SELECT e.content FROM event e
   WHERE e.id IN (
       SELECT t.event_id FROM tag t
       WHERE t.name = ? AND t.value IN (?, ?, ?)
   ) AND e.hidden != TRUE;
   ```

## Maintenance

### Regular Operations

1. **Check database integrity:**
   ```bash
   sqlite3 c_nostr_relay.db "PRAGMA integrity_check;"
   ```

2. **Optimize database:**
   ```bash
   sqlite3 c_nostr_relay.db "PRAGMA optimize; VACUUM; ANALYZE;"
   ```

3. **Clean expired events:**
   ```sql
   DELETE FROM event WHERE expires_at <= strftime('%s', 'now');
   ```

### Monitoring

1. **Database size:**
   ```bash
   ls -lh c_nostr_relay.db
   ```

2. **Table statistics:**
   ```sql
   SELECT name, COUNT(*) as count FROM (
       SELECT 'events' as name FROM event UNION ALL
       SELECT 'tags' as name FROM tag UNION ALL
       SELECT 'verifications' as name FROM user_verification
   ) GROUP BY name;
   ```

## Migration Support

The schema includes a migration system for future updates:

```sql
CREATE TABLE schema_info (
    version INTEGER PRIMARY KEY,
    applied_at INTEGER NOT NULL,
    description TEXT
);
```
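
A hedged illustration of how a future migration could be recorded in the table shown above (the version number and description are invented for this example, not part of any real migration):

```sql
-- Hypothetical: record that migration 19 was applied, right after running its DDL.
INSERT INTO schema_info (version, applied_at, description)
VALUES (19, strftime('%s', 'now'), 'add example index on tag(value)');
```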

## Security Considerations

1. **Input validation** - Always validate event JSON and signatures
2. **Rate limiting** - Implement at application level
3. **Access control** - Use `account` table for permissions
4. **Backup strategy** - Regular database backups recommended

## Compatibility

- **SQLite version** - Requires SQLite 3.8.0+
- **nostr-rs-relay** - Schema compatible with v18
- **NIPs supported** - 01, 02, 05, 09, 10, 11, 26, 40, 42
- **C libraries** - Compatible with sqlite3 C API

## Troubleshooting

### Common Issues

1. **Database locked error:**
   - Ensure proper connection closing in your C code
   - Check for long-running transactions

2. **Performance issues:**
   - Run `PRAGMA optimize;` regularly
   - Consider `VACUUM` if the database has grown significantly

3. **Schema errors:**
   - Verify SQLite version compatibility
   - Check foreign key constraints

### Getting Help

- Check the main project README for C implementation details
- Review nostr-rs-relay documentation for reference implementation
- Consult Nostr NIPs for protocol specifications

## License

This database schema is part of the C Nostr Relay project and follows the same license terms.

db/c_nostr_relay.db (new file, BIN): binary file not shown
db/c_nostr_relay.db-shm (new file, BIN): binary file not shown
db/c_nostr_relay.db-wal (new file, BIN): binary file not shown

db/init.sh (new executable file, 234 lines)
@@ -0,0 +1,234 @@
#!/bin/bash

# C Nostr Relay Database Initialization Script
# Creates and initializes the SQLite database with proper schema

set -e  # Exit on any error

# Configuration
DB_DIR="$(dirname "$0")"
DB_NAME="c_nostr_relay.db"
DB_PATH="${DB_DIR}/${DB_NAME}"
SCHEMA_FILE="${DB_DIR}/schema.sql"

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'  # No Color

# Logging functions
log_info() {
    echo -e "${BLUE}[INFO]${NC} $1"
}

log_success() {
    echo -e "${GREEN}[SUCCESS]${NC} $1"
}

log_warning() {
    echo -e "${YELLOW}[WARNING]${NC} $1"
}

log_error() {
    echo -e "${RED}[ERROR]${NC} $1"
}

# Check if SQLite3 is installed
check_sqlite() {
    if ! command -v sqlite3 &> /dev/null; then
        log_error "sqlite3 is not installed. Please install it first:"
        echo "  Ubuntu/Debian: sudo apt-get install sqlite3"
        echo "  CentOS/RHEL:   sudo yum install sqlite"
        echo "  macOS:         brew install sqlite3"
        exit 1
    fi

    local version=$(sqlite3 --version | cut -d' ' -f1)
    log_info "Using SQLite version: $version"
}

# Create database directory if it doesn't exist
create_db_directory() {
    if [ ! -d "$DB_DIR" ]; then
        log_info "Creating database directory: $DB_DIR"
        mkdir -p "$DB_DIR"
    fi
}

# Backup existing database if it exists
backup_existing_db() {
    if [ -f "$DB_PATH" ]; then
        local backup_path="${DB_PATH}.backup.$(date +%Y%m%d_%H%M%S)"
        log_warning "Existing database found. Creating backup: $backup_path"
        cp "$DB_PATH" "$backup_path"
    fi
}

# Initialize the database with schema
init_database() {
    log_info "Initializing database: $DB_PATH"

    if [ ! -f "$SCHEMA_FILE" ]; then
        log_error "Schema file not found: $SCHEMA_FILE"
        exit 1
    fi

    # Remove existing database if --force flag is used
    if [ "$1" = "--force" ] && [ -f "$DB_PATH" ]; then
        log_warning "Force flag detected. Removing existing database."
        rm -f "$DB_PATH"
    fi

    # Create the database and apply schema
    log_info "Applying schema from: $SCHEMA_FILE"
    if sqlite3 "$DB_PATH" < "$SCHEMA_FILE"; then
        log_success "Database schema applied successfully"
    else
        log_error "Failed to apply database schema"
        exit 1
    fi
}

# Verify database integrity
verify_database() {
    log_info "Verifying database integrity..."

    # Check if database file exists and is not empty
    if [ ! -s "$DB_PATH" ]; then
        log_error "Database file is empty or doesn't exist"
        exit 1
    fi

    # Run SQLite integrity check
    local integrity_result=$(sqlite3 "$DB_PATH" "PRAGMA integrity_check;")
    if [ "$integrity_result" = "ok" ]; then
        log_success "Database integrity check passed"
    else
        log_error "Database integrity check failed: $integrity_result"
        exit 1
    fi

    # Verify schema version
    local schema_version=$(sqlite3 "$DB_PATH" "PRAGMA user_version;")
    log_info "Database schema version: $schema_version"

    # Check that main tables exist
    local table_count=$(sqlite3 "$DB_PATH" "SELECT count(*) FROM sqlite_master WHERE type='table' AND name IN ('events', 'schema_info');")
    if [ "$table_count" -eq 2 ]; then
        log_success "Core tables created successfully"
    else
        log_error "Missing core tables (expected 2, found $table_count)"
        exit 1
    fi
}

# Display database information
show_db_info() {
    log_info "Database Information:"
    echo "  Location: $DB_PATH"
    echo "  Size: $(du -h "$DB_PATH" | cut -f1)"

    log_info "Database Tables:"
    sqlite3 "$DB_PATH" "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name;" | sed 's/^/ - /'

    log_info "Database Indexes:"
    sqlite3 "$DB_PATH" "SELECT name FROM sqlite_master WHERE type='index' AND name NOT LIKE 'sqlite_%' ORDER BY name;" | sed 's/^/ - /'

    log_info "Database Views:"
    sqlite3 "$DB_PATH" "SELECT name FROM sqlite_master WHERE type='view' ORDER BY name;" | sed 's/^/ - /'
}

# Run database optimization
optimize_database() {
    log_info "Running database optimization..."
    sqlite3 "$DB_PATH" "PRAGMA optimize; VACUUM; ANALYZE;"
    log_success "Database optimization completed"
}

# Print usage information
print_usage() {
    echo "Usage: $0 [OPTIONS]"
    echo ""
    echo "Initialize SQLite database for C Nostr Relay"
    echo ""
    echo "Options:"
    echo "  --force     Remove existing database before initialization"
    echo "  --info      Show database information after initialization"
    echo "  --optimize  Run database optimization after initialization"
    echo "  --help      Show this help message"
    echo ""
    echo "Examples:"
    echo "  $0                    # Initialize database (with backup if exists)"
    echo "  $0 --force            # Force reinitialize database"
    echo "  $0 --info --optimize  # Initialize with info and optimization"
}

# Main execution
main() {
    local force_flag=false
    local show_info=false
    local optimize=false

    # Parse command line arguments
    while [[ $# -gt 0 ]]; do
        case $1 in
            --force)
                force_flag=true
                shift
                ;;
            --info)
                show_info=true
                shift
                ;;
            --optimize)
                optimize=true
                shift
                ;;
            --help)
                print_usage
                exit 0
                ;;
            *)
                log_error "Unknown option: $1"
                print_usage
                exit 1
                ;;
        esac
    done

    log_info "Starting C Nostr Relay database initialization..."

    # Execute initialization steps
    check_sqlite
    create_db_directory

    if [ "$force_flag" = false ]; then
        backup_existing_db
    fi

    if [ "$force_flag" = true ]; then
        init_database --force
    else
        init_database
    fi

    verify_database

    if [ "$optimize" = true ]; then
        optimize_database
    fi

    if [ "$show_info" = true ]; then
        show_db_info
    fi

    log_success "Database initialization completed successfully!"
    echo ""
    echo "Database ready at: $DB_PATH"
    echo "You can now start your C Nostr Relay application."
}

# Execute main function with all arguments
main "$@"

db/schema.sql (new file, 181 lines)
@@ -0,0 +1,181 @@
-- C Nostr Relay Database Schema
-- SQLite schema for storing Nostr events with JSON tags support

-- Schema version tracking
PRAGMA user_version = 2;

-- Enable foreign key support
PRAGMA foreign_keys = ON;

-- Optimize for performance
PRAGMA journal_mode = WAL;
PRAGMA synchronous = NORMAL;
PRAGMA cache_size = 10000;

-- Core events table with hybrid single-table design
CREATE TABLE events (
    id TEXT PRIMARY KEY,              -- Nostr event ID (hex string)
    pubkey TEXT NOT NULL,             -- Public key of event author (hex string)
    created_at INTEGER NOT NULL,      -- Event creation timestamp (Unix timestamp)
    kind INTEGER NOT NULL,            -- Event kind (0-65535)
    event_type TEXT NOT NULL CHECK (event_type IN ('regular', 'replaceable', 'ephemeral', 'addressable')),
    content TEXT NOT NULL,            -- Event content (text content only)
    sig TEXT NOT NULL,                -- Event signature (hex string)
    tags JSON NOT NULL DEFAULT '[]',  -- Event tags as JSON array
    first_seen INTEGER NOT NULL DEFAULT (strftime('%s', 'now'))  -- When relay received event
);

-- Core performance indexes
CREATE INDEX idx_events_pubkey ON events(pubkey);
CREATE INDEX idx_events_kind ON events(kind);
CREATE INDEX idx_events_created_at ON events(created_at DESC);
CREATE INDEX idx_events_event_type ON events(event_type);

-- Composite indexes for common query patterns
CREATE INDEX idx_events_kind_created_at ON events(kind, created_at DESC);
CREATE INDEX idx_events_pubkey_created_at ON events(pubkey, created_at DESC);
CREATE INDEX idx_events_pubkey_kind ON events(pubkey, kind);

-- Schema information table
CREATE TABLE schema_info (
    key TEXT PRIMARY KEY,
    value TEXT NOT NULL,
    updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now'))
);

-- Insert schema metadata
INSERT INTO schema_info (key, value) VALUES
    ('version', '2'),
    ('description', 'Hybrid single-table Nostr relay schema with JSON tags'),
    ('created_at', strftime('%s', 'now'));

-- Helper views for common queries
CREATE VIEW recent_events AS
SELECT id, pubkey, created_at, kind, event_type, content
FROM events
WHERE event_type != 'ephemeral'
ORDER BY created_at DESC
LIMIT 1000;

CREATE VIEW event_stats AS
SELECT
    event_type,
    COUNT(*) as count,
    AVG(length(content)) as avg_content_length,
    MIN(created_at) as earliest,
    MAX(created_at) as latest
FROM events
GROUP BY event_type;

-- Optimization: Trigger for automatic cleanup of ephemeral events older than 1 hour
CREATE TRIGGER cleanup_ephemeral_events
AFTER INSERT ON events
WHEN NEW.event_type = 'ephemeral'
BEGIN
    DELETE FROM events
    WHERE event_type = 'ephemeral'
      AND first_seen < (strftime('%s', 'now') - 3600);
END;

-- Replaceable event handling trigger
CREATE TRIGGER handle_replaceable_events
AFTER INSERT ON events
WHEN NEW.event_type = 'replaceable'
BEGIN
    DELETE FROM events
    WHERE pubkey = NEW.pubkey
      AND kind = NEW.kind
      AND event_type = 'replaceable'
      AND id != NEW.id;
END;

-- Persistent Subscriptions Logging Tables (Phase 2)
-- Optional database logging for subscription analytics and debugging

-- Subscription events log
CREATE TABLE subscription_events (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    subscription_id TEXT NOT NULL,    -- Subscription ID from client
    client_ip TEXT NOT NULL,          -- Client IP address
    event_type TEXT NOT NULL CHECK (event_type IN ('created', 'closed', 'expired', 'disconnected')),
    filter_json TEXT,                 -- JSON representation of filters (for created events)
    events_sent INTEGER DEFAULT 0,    -- Number of events sent to this subscription
    created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now')),
    ended_at INTEGER,                 -- When subscription ended (for closed/expired/disconnected)
    duration INTEGER                  -- Computed: ended_at - created_at
);

-- Subscription metrics summary
CREATE TABLE subscription_metrics (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    date TEXT NOT NULL,                        -- Date (YYYY-MM-DD)
    total_created INTEGER DEFAULT 0,           -- Total subscriptions created
    total_closed INTEGER DEFAULT 0,            -- Total subscriptions closed
    total_events_broadcast INTEGER DEFAULT 0,  -- Total events broadcast
    avg_duration REAL DEFAULT 0,               -- Average subscription duration
    peak_concurrent INTEGER DEFAULT 0,         -- Peak concurrent subscriptions
    updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now')),
    UNIQUE(date)
);

-- Event broadcasting log (optional, for detailed analytics)
CREATE TABLE event_broadcasts (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    event_id TEXT NOT NULL,           -- Event ID that was broadcast
    subscription_id TEXT NOT NULL,    -- Subscription that received it
    client_ip TEXT NOT NULL,          -- Client IP
    broadcast_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now')),
    FOREIGN KEY (event_id) REFERENCES events(id)
);

-- Indexes for subscription logging performance
CREATE INDEX idx_subscription_events_id ON subscription_events(subscription_id);
CREATE INDEX idx_subscription_events_type ON subscription_events(event_type);
CREATE INDEX idx_subscription_events_created ON subscription_events(created_at DESC);
CREATE INDEX idx_subscription_events_client ON subscription_events(client_ip);

CREATE INDEX idx_subscription_metrics_date ON subscription_metrics(date DESC);

CREATE INDEX idx_event_broadcasts_event ON event_broadcasts(event_id);
CREATE INDEX idx_event_broadcasts_sub ON event_broadcasts(subscription_id);
CREATE INDEX idx_event_broadcasts_time ON event_broadcasts(broadcast_at DESC);

-- Trigger to update subscription duration when ended
CREATE TRIGGER update_subscription_duration
AFTER UPDATE OF ended_at ON subscription_events
WHEN NEW.ended_at IS NOT NULL AND OLD.ended_at IS NULL
BEGIN
    UPDATE subscription_events
    SET duration = NEW.ended_at - NEW.created_at
    WHERE id = NEW.id;
END;

-- View for subscription analytics
CREATE VIEW subscription_analytics AS
SELECT
    date(created_at, 'unixepoch') as date,
    COUNT(*) as subscriptions_created,
    COUNT(CASE WHEN ended_at IS NOT NULL THEN 1 END) as subscriptions_ended,
    AVG(CASE WHEN duration IS NOT NULL THEN duration END) as avg_duration_seconds,
    MAX(events_sent) as max_events_sent,
    AVG(events_sent) as avg_events_sent,
    COUNT(DISTINCT client_ip) as unique_clients
FROM subscription_events
GROUP BY date(created_at, 'unixepoch')
ORDER BY date DESC;

-- View for current active subscriptions (from log perspective)
CREATE VIEW active_subscriptions_log AS
SELECT
    subscription_id,
    client_ip,
    filter_json,
    events_sent,
    created_at,
    (strftime('%s', 'now') - created_at) as duration_seconds
FROM subscription_events
WHERE event_type = 'created'
  AND subscription_id NOT IN (
    SELECT subscription_id FROM subscription_events
    WHERE event_type IN ('closed', 'expired', 'disconnected')
  );
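
To illustrate how the `handle_replaceable_events` trigger in schema.sql behaves, here is a hedged, made-up example (the ids, pubkey, timestamps, and content are placeholders, not real events):

```sql
-- Two kind-0 metadata events from the same author, both classified as 'replaceable'.
INSERT INTO events (id, pubkey, created_at, kind, event_type, content, sig)
VALUES ('id_old', 'pubkey_1', 1700000000, 0, 'replaceable', '{"name":"old profile"}', 'sig_1');

INSERT INTO events (id, pubkey, created_at, kind, event_type, content, sig)
VALUES ('id_new', 'pubkey_1', 1700000100, 0, 'replaceable', '{"name":"new profile"}', 'sig_2');

-- The AFTER INSERT trigger deletes every other 'replaceable' row with the same
-- pubkey and kind, so only the most recently inserted event remains:
SELECT id FROM events WHERE pubkey = 'pubkey_1' AND kind = 0;  -- returns only 'id_new'
```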

make_and_restart_relay.sh (new executable file, 108 lines)
@@ -0,0 +1,108 @@
#!/bin/bash

# C-Relay Build and Restart Script
# Builds the project first, then stops any running relay and starts a new one in the background

echo "=== C Nostr Relay Build and Restart Script ==="

# Build the project first
echo "Building project..."
make clean all

# Check if build was successful
if [ $? -ne 0 ]; then
    echo "ERROR: Build failed. Cannot restart relay."
    exit 1
fi

# Check if relay binary exists after build - detect architecture
ARCH=$(uname -m)
case "$ARCH" in
    x86_64)
        BINARY_PATH="./build/c_relay_x86"
        ;;
    aarch64|arm64)
        BINARY_PATH="./build/c_relay_arm64"
        ;;
    *)
        BINARY_PATH="./build/c_relay_$ARCH"
        ;;
esac

if [ ! -f "$BINARY_PATH" ]; then
    echo "ERROR: Relay binary not found at $BINARY_PATH after build. Build may have failed."
    exit 1
fi

echo "Build successful. Proceeding with relay restart..."

# Kill existing relay if running
echo "Stopping any existing relay servers..."
pkill -f "c_relay_" 2>/dev/null
sleep 2  # Give time for shutdown

# Check if port is still bound
if lsof -i :8888 >/dev/null 2>&1; then
    echo "Port 8888 still in use, force killing..."
    fuser -k 8888/tcp 2>/dev/null || echo "No process on port 8888"
fi

# Get any remaining processes
REMAINING_PIDS=$(pgrep -f "c_relay_" || echo "")
if [ -n "$REMAINING_PIDS" ]; then
    echo "Force killing remaining processes: $REMAINING_PIDS"
    kill -9 $REMAINING_PIDS 2>/dev/null
    sleep 1
else
    echo "No existing relay found"
fi

# Clean up PID file
rm -f relay.pid

# Initialize database if needed
if [ ! -f "./db/c_nostr_relay.db" ]; then
    echo "Initializing database..."
    ./db/init.sh --force >/dev/null 2>&1
fi

# Start relay in background with output redirection
echo "Starting relay server..."
echo "Debug: Current processes: $(ps aux | grep 'c_relay_' | grep -v grep || echo 'None')"

# Start relay in background and capture its PID
$BINARY_PATH > relay.log 2>&1 &
RELAY_PID=$!

echo "Started with PID: $RELAY_PID"

# Check if server is still running after short delay
sleep 3

# Check if process is still alive
if ps -p "$RELAY_PID" >/dev/null 2>&1; then
    echo "Relay started successfully!"
    echo "PID: $RELAY_PID"
    echo "WebSocket endpoint: ws://127.0.0.1:8888"
    echo "Log file: relay.log"
    echo ""

    # Save PID for debugging
    echo $RELAY_PID > relay.pid

    echo "=== Relay server running in background ==="
    echo "To kill relay: pkill -f 'c_relay_'"
    echo "To check status: ps aux | grep c_relay_"
    echo "To view logs: tail -f relay.log"
    echo "Binary: $BINARY_PATH"
    echo "Ready for Nostr client connections!"
else
    echo "ERROR: Relay failed to start"
    echo "Debug: Check relay.log for error details:"
    echo "--- Last 10 lines of relay.log ---"
    tail -n 10 relay.log 2>/dev/null || echo "No log file found"
    echo "--- End log ---"
    exit 1
fi

echo ""

nips (submodule): Submodule nips added at 8c45ff5d96
nostr_core_lib (submodule): Submodule nostr_core_lib added at 33129d82fd
otp (submodule): Submodule otp added at 3d990091eb

relay.log (new file, 131 lines)
@@ -0,0 +1,131 @@
=== C Nostr Relay Server ===
[SUCCESS] Database connection established
[INFO] Starting relay server...
[INFO] Starting libwebsockets-based Nostr relay server...
[SUCCESS] WebSocket relay started on ws://127.0.0.1:8888
[INFO] WebSocket connection established
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[INFO] WebSocket connection closed
[INFO] WebSocket connection established
[INFO] Received WebSocket message
[INFO] Handling EVENT message with full NIP-01 validation
[SUCCESS] Event stored in database
[SUCCESS] Event validated and stored successfully
[INFO] WebSocket connection closed
[INFO] WebSocket connection established
[INFO] Received WebSocket message
[INFO] Handling EVENT message with full NIP-01 validation
[SUCCESS] Event stored in database
[SUCCESS] Event validated and stored successfully
[INFO] WebSocket connection closed
[INFO] WebSocket connection established
[INFO] Received WebSocket message
[INFO] Handling EVENT message with full NIP-01 validation
[SUCCESS] Event stored in database
[SUCCESS] Event validated and stored successfully
[INFO] WebSocket connection closed
[INFO] WebSocket connection established
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[WARNING] Subscription 'exists_1757082297' not found for removal
[INFO] Closed subscription: exists_1757082297
[INFO] WebSocket connection closed
[INFO] WebSocket connection established
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[WARNING] Subscription 'exists_1757082298' not found for removal
[INFO] Closed subscription: exists_1757082298
[INFO] WebSocket connection closed
[INFO] WebSocket connection established
[INFO] Received WebSocket message
[INFO] Handling EVENT message with full NIP-01 validation
[INFO] Event not found for deletion: [INFO] ...
[INFO] Event not found for deletion: [INFO] ...
[SUCCESS] Event stored in database
[INFO] Deletion request processed: 0 events deleted
[INFO] WebSocket connection closed
[INFO] WebSocket connection established
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[WARNING] Subscription 'exists_1757082301' not found for removal
[INFO] Closed subscription: exists_1757082301
[INFO] WebSocket connection closed
[INFO] WebSocket connection established
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[WARNING] Subscription 'exists_1757082301' not found for removal
[INFO] Closed subscription: exists_1757082301
[INFO] WebSocket connection closed
[INFO] WebSocket connection established
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[WARNING] Subscription 'exists_1757082301' not found for removal
[INFO] Closed subscription: exists_1757082301
[INFO] WebSocket connection closed
[INFO] WebSocket connection established
[INFO] Received WebSocket message
[INFO] Handling EVENT message with full NIP-01 validation
[SUCCESS] Event stored in database
[INFO] Deletion request processed: 0 events deleted
[INFO] WebSocket connection closed
[INFO] WebSocket connection established
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[INFO] Received WebSocket message
[WARNING] Subscription 'exists_1757082305' not found for removal
[INFO] Closed subscription: exists_1757082305
[INFO] WebSocket connection closed
[INFO] WebSocket connection established
[INFO] Received WebSocket message
[INFO] Handling EVENT message with full NIP-01 validation
[INFO] Event not found for deletion: ✗ Cou...
[SUCCESS] Event stored in database
[INFO] Deletion request processed: 0 events deleted
[INFO] WebSocket connection closed
[INFO] WebSocket connection established
[INFO] Received WebSocket message
[INFO] Handling REQ message for persistent subscription
[INFO] Added subscription 'exists_1757082309' (total: 1)
[INFO] Executing SQL: SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 ORDER BY created_at DESC LIMIT 500
[INFO] Query returned 25 rows
[INFO] Total events sent: 25
[INFO] Received WebSocket message
[INFO] Removed subscription 'exists_1757082309' (total: 0)
[INFO] Closed subscription: exists_1757082309
[INFO] WebSocket connection closed
[WARNING] Subscription 'z[<5B><>.Y' not found for removal
[INFO] WebSocket connection established
[INFO] Received WebSocket message
[INFO] Handling EVENT message with full NIP-01 validation
[INFO] WebSocket connection closed
[INFO] WebSocket connection established
[INFO] Received WebSocket message
[INFO] Handling REQ message for persistent subscription
[INFO] Added subscription 'kind5_1757082309' (total: 1)
[INFO] Executing SQL: SELECT id, pubkey, created_at, kind, content, sig, tags FROM events WHERE 1=1 AND kind IN (5) ORDER BY created_at DESC LIMIT 500
[INFO] Query returned 3 rows
[INFO] Total events sent: 3
[INFO] Received WebSocket message
[INFO] Removed subscription 'kind5_1757082309' (total: 0)
[INFO] Closed subscription: kind5_1757082309
[INFO] WebSocket connection closed
[WARNING] Subscription '<27>f<EFBFBD><66>.Y' not found for removal

src/main.c (new file, 2194 lines)
File diff suppressed because it is too large.
357
tests/1_nip_test.sh
Executable file
357
tests/1_nip_test.sh
Executable file
@@ -0,0 +1,357 @@
|
||||
#!/bin/bash

# Comprehensive C-Relay Test - Test event types and subscriptions
# Uses nak to generate and publish various event types, then tests subscriptions

set -e # Exit on any error

# Color constants
RED='\033[31m'
GREEN='\033[32m'
YELLOW='\033[33m'
BLUE='\033[34m'
BOLD='\033[1m'
RESET='\033[0m'

# Test configuration
RELAY_URL="ws://127.0.0.1:8888"
TEST_PRIVATE_KEY="nsec1j4c6269y9w0q2er2xjw8sv2ehyrtfxq3jwgdlxj6qfn8z4gjsq5qfvfk99"

# Print functions
print_header() {
    echo -e "${BLUE}${BOLD}=== $1 ===${RESET}"
}

print_step() {
    echo -e "${YELLOW}[STEP]${RESET} $1"
}

print_success() {
    echo -e "${GREEN}✓${RESET} $1"
}

print_error() {
    echo -e "${RED}✗${RESET} $1"
}

print_info() {
    echo -e "${BLUE}[INFO]${RESET} $1"
}

print_warning() {
    echo -e "${YELLOW}[WARNING]${RESET} $1"
}

# Global arrays to store event IDs for subscription tests
declare -a REGULAR_EVENT_IDS=()
declare -a REPLACEABLE_EVENT_IDS=()
declare -a EPHEMERAL_EVENT_IDS=()
declare -a ADDRESSABLE_EVENT_IDS=()

# Helper function to publish event and extract ID
publish_event() {
    local event_json="$1"
    local event_type="$2"
    local description="$3"

    # Extract event ID
    local event_id=$(echo "$event_json" | jq -r '.id' 2>/dev/null)
    if [[ "$event_id" == "null" || -z "$event_id" ]]; then
        print_error "Could not extract event ID from $description"
        return 1
    fi

    print_info "Publishing $description..."

    # Create EVENT message in Nostr format
    local event_message="[\"EVENT\",$event_json]"

    # Publish to relay
    local response=""
    if command -v websocat &> /dev/null; then
        response=$(echo "$event_message" | timeout 5s websocat "$RELAY_URL" 2>&1 || echo "Connection failed")
    else
        print_error "websocat not found - required for testing"
        return 1
    fi

    # Check response
    if [[ "$response" == *"Connection failed"* ]]; then
        print_error "Failed to connect to relay for $description"
        return 1
    elif [[ "$response" == *"true"* ]]; then
        print_success "$description uploaded (ID: ${event_id:0:16}...)"

        # Store event ID in appropriate array
        case "$event_type" in
            "regular") REGULAR_EVENT_IDS+=("$event_id") ;;
            "replaceable") REPLACEABLE_EVENT_IDS+=("$event_id") ;;
            "ephemeral") EPHEMERAL_EVENT_IDS+=("$event_id") ;;
            "addressable") ADDRESSABLE_EVENT_IDS+=("$event_id") ;;
        esac
        echo # Add blank line for readability
        return 0
    else
        print_warning "$description might have failed: $response"
        echo # Add blank line for readability
        return 1
    fi
}

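# Note (illustrative, not relay-specific code): the "true"/"false" checks above
# rely on the NIP-01 OK response the relay sends for each submitted event.
# For an accepted event it looks roughly like:
#   ["OK","<64-hex event id>",true,""]
# and for a rejected one:
#   ["OK","<64-hex event id>",false,"invalid: <reason>"]
# The exact reason strings vary by relay.
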
# Helper function to publish invalid event and expect rejection
publish_invalid_event() {
    local event_json="$1"
    local description="$2"
    local expected_error="$3"

    print_info "Publishing invalid $description..."

    # Create EVENT message in Nostr format
    local event_message="[\"EVENT\",$event_json]"

    # Publish to relay
    local response=""
    if command -v websocat &> /dev/null; then
        response=$(echo "$event_message" | timeout 5s websocat "$RELAY_URL" 2>&1 || echo "Connection failed")
    else
        print_error "websocat not found - required for testing"
        return 1
    fi

    # Check response - should contain "false" and error message
    if [[ "$response" == *"Connection failed"* ]]; then
        print_error "Failed to connect to relay for $description"
        return 1
    elif [[ "$response" == *"false"* ]]; then
        # Extract error message
        local error_msg=$(echo "$response" | grep -o '"[^"]*invalid[^"]*"' | head -1 | sed 's/"//g' 2>/dev/null || echo "rejected")
        print_success "$description correctly rejected: $error_msg"
        echo # Add blank line for readability
        return 0
    elif [[ "$response" == *"true"* ]]; then
        print_error "$description was incorrectly accepted (should have been rejected)"
        echo # Add blank line for readability
        return 1
    else
        print_warning "$description response unclear: $response"
        echo # Add blank line for readability
        return 1
    fi
}

# Test subscription with filters
test_subscription() {
    local sub_id="$1"
    local filter="$2"
    local description="$3"
    local expected_count="$4"

    print_step "Testing subscription: $description"

    # Create REQ message
    local req_message="[\"REQ\",\"$sub_id\",$filter]"

    print_info "Testing filter: $filter"

    # Send subscription and collect events
    local response=""
    if command -v websocat &> /dev/null; then
        response=$(echo -e "$req_message\n[\"CLOSE\",\"$sub_id\"]" | timeout 3s websocat "$RELAY_URL" 2>/dev/null || echo "")
    fi

    # Count EVENT responses (lines containing ["EVENT","sub_id",...])
    # grep -c already prints 0 when nothing matches (it just exits non-zero),
    # so only suppress the exit status instead of echoing a second "0".
    local event_count=0
    if [[ -n "$response" ]]; then
        event_count=$(echo "$response" | grep -c "\"EVENT\"" 2>/dev/null || true)
        event_count=${event_count:-0}
    fi

    if [[ "$expected_count" == "any" ]]; then
        if [[ $event_count -gt 0 ]]; then
            print_success "$description - Found $event_count events"
        else
            print_warning "$description - No events found"
        fi
    elif [[ $event_count -eq $expected_count ]]; then
        print_success "$description - Found expected $event_count events"
    else
        print_warning "$description - Expected $expected_count events, found $event_count"
    fi

    # Show a few sample events for verification (first 2)
    if [[ $event_count -gt 0 && "$description" == "All events" ]]; then
        print_info "Sample events (first 2):"
        echo "$response" | grep "\"EVENT\"" | head -2 | while IFS= read -r line; do
            local event_content=$(echo "$line" | jq -r '.[2].content' 2>/dev/null || echo "N/A")
            local event_kind=$(echo "$line" | jq -r '.[2].kind' 2>/dev/null || echo "N/A")
            local event_id=$(echo "$line" | jq -r '.[2].id' 2>/dev/null || echo "N/A")
            echo "  - ID: ${event_id:0:16}... Kind: $event_kind Content: ${event_content:0:30}..."
        done
    fi

    echo # Add blank line for readability
    return 0
}

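# Illustrative response sequence that test_subscription counts (standard NIP-01
# flow; actual event JSON omitted):
#   ["EVENT","test_all",{...stored event 1...}]
#   ["EVENT","test_all",{...stored event 2...}]
#   ["EOSE","test_all"]
# Each stored event arrives as one EVENT line and EOSE marks the end of stored
# events, so counting lines that contain "EVENT" approximates the result size.
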
# Main test function
run_comprehensive_test() {
    print_header "C-Relay Comprehensive Test"

    # Check dependencies
    print_step "Checking dependencies..."
    if ! command -v nak &> /dev/null; then
        print_error "nak command not found"
        print_info "Please install nak: go install github.com/fiatjaf/nak@latest"
        return 1
    fi
    if ! command -v websocat &> /dev/null; then
        print_error "websocat command not found"
        print_info "Please install websocat for testing"
        return 1
    fi
    if ! command -v jq &> /dev/null; then
        print_error "jq command not found"
        print_info "Please install jq for JSON processing"
        return 1
    fi
    print_success "All dependencies found"

    print_header "PHASE 1: Publishing Various Event Types"

    # Test 1: Regular Events (kind 1)
    print_step "Creating regular events (kind 1)..."
    local regular1=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Regular event #1" -k 1 --ts $(($(date +%s) - 100)) -t "type=regular" -t "test=phase1" 2>/dev/null)
    local regular2=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Regular event #2 with tags" -k 1 --ts $(($(date +%s) - 90)) -e "previous_event_id" -p "test_pubkey" -t "type=regular" -t "test=phase1" 2>/dev/null)

    publish_event "$regular1" "regular" "Regular event #1"
    publish_event "$regular2" "regular" "Regular event #2"

    # Test 2: Replaceable Events (kind 0 - metadata)
    print_step "Creating replaceable events (kind 0)..."
    local replaceable1=$(nak event --sec "$TEST_PRIVATE_KEY" -c '{"name":"Test User","about":"Testing C-Relay"}' -k 0 --ts $(($(date +%s) - 80)) -t "type=replaceable" 2>/dev/null)
    local replaceable2=$(nak event --sec "$TEST_PRIVATE_KEY" -c '{"name":"Test User Updated","about":"Updated profile"}' -k 0 --ts $(($(date +%s) - 70)) -t "type=replaceable" 2>/dev/null)

    publish_event "$replaceable1" "replaceable" "Replaceable event #1 (metadata)"
    publish_event "$replaceable2" "replaceable" "Replaceable event #2 (metadata update)"

    # Test 3: Ephemeral Events (kind 20000+)
    print_step "Creating ephemeral events (kind 20001)..."
    local ephemeral1=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Ephemeral event - should not be stored permanently" -k 20001 --ts $(date +%s) -t "type=ephemeral" 2>/dev/null)

    publish_event "$ephemeral1" "ephemeral" "Ephemeral event"

    # Test 4: Addressable Events (kind 30000+)
    print_step "Creating addressable events (kind 30001)..."
    local addressable1=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Addressable event with d-tag" -k 30001 --ts $(($(date +%s) - 50)) -t "d=test-article" -t "type=addressable" 2>/dev/null)
    local addressable2=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Updated addressable event" -k 30001 --ts $(($(date +%s) - 40)) -t "d=test-article" -t "type=addressable" -t "updated=true" 2>/dev/null)

    publish_event "$addressable1" "addressable" "Addressable event #1"
    publish_event "$addressable2" "addressable" "Addressable event #2 (update)"

    # Brief pause to let events settle
    sleep 2

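    # For reference, `nak event` prints the signed event as a single JSON object,
    # which is what publish_event wraps in ["EVENT",...]. Illustrative shape only
    # (placeholder values, not real output):
    #   {"id":"<64-hex>","pubkey":"<64-hex>","created_at":1757082309,
    #    "kind":1,"tags":[["type","regular"],["test","phase1"]],
    #    "content":"Regular event #1","sig":"<128-hex>"}
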
print_header "PHASE 2: Testing Invalid Events (NIP-01 Validation)"
|
||||
|
||||
print_step "Testing various invalid events that should be rejected..."
|
||||
|
||||
# Test 1: Event with invalid JSON structure (malformed)
|
||||
local malformed_event='{"id":"invalid","pubkey":"invalid_pubkey","created_at":"not_a_number","kind":1,"tags":[],"content":"test"}'
|
||||
publish_invalid_event "$malformed_event" "malformed event with invalid created_at" "invalid"
|
||||
|
||||
# Test 2: Event with missing required fields
|
||||
local missing_field_event='{"id":"test123","pubkey":"valid_pubkey","kind":1,"tags":[],"content":"test"}'
|
||||
publish_invalid_event "$missing_field_event" "event missing created_at and sig" "invalid"
|
||||
|
||||
# Test 3: Event with invalid pubkey format (not hex)
|
||||
local invalid_pubkey_event='{"id":"abc123","pubkey":"not_valid_hex_pubkey","created_at":1234567890,"kind":1,"tags":[],"content":"test","sig":"fake_sig"}'
|
||||
publish_invalid_event "$invalid_pubkey_event" "event with invalid pubkey format" "invalid"
|
||||
|
||||
# Test 4: Event with invalid event ID format
|
||||
local invalid_id_event='{"id":"not_64_char_hex","pubkey":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef","created_at":1234567890,"kind":1,"tags":[],"content":"test","sig":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"}'
|
||||
publish_invalid_event "$invalid_id_event" "event with invalid ID format" "invalid"
|
||||
|
||||
# Test 5: Event with invalid signature
|
||||
local invalid_sig_event='{"id":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef","pubkey":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef","created_at":1234567890,"kind":1,"tags":[],"content":"test","sig":"invalid_signature_format"}'
|
||||
publish_invalid_event "$invalid_sig_event" "event with invalid signature format" "invalid"
|
||||
|
||||
# Test 6: Event with invalid kind (negative)
|
||||
local invalid_kind_event='{"id":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef","pubkey":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef","created_at":1234567890,"kind":-1,"tags":[],"content":"test","sig":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"}'
|
||||
publish_invalid_event "$invalid_kind_event" "event with negative kind" "invalid"
|
||||
|
||||
# Test 7: Event with invalid tags format (not array)
|
||||
local invalid_tags_event='{"id":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef","pubkey":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef","created_at":1234567890,"kind":1,"tags":"not_an_array","content":"test","sig":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"}'
|
||||
publish_invalid_event "$invalid_tags_event" "event with invalid tags format" "invalid"
|
||||
|
||||
print_success "Invalid event tests completed - all should have been rejected"
|
||||
|
||||
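    # What "full NIP-01 validation" has to check, sketched for reference
    # (a minimal illustration, not the relay's actual code): the event id must be
    # the sha256 of the canonical serialization [0,pubkey,created_at,kind,tags,content].
    #   expected_id=$(echo "$event_json" | jq -cj '[0,.pubkey,.created_at,.kind,.tags,.content]' | sha256sum | cut -d' ' -f1)
    #   [[ "$expected_id" == "$(echo "$event_json" | jq -r .id)" ]] || echo "id mismatch"
    # In addition, id/pubkey must be 64 lowercase hex characters, sig 128 hex
    # characters, and the Schnorr signature must verify over the id.
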
print_header "PHASE 3: Testing Subscriptions and Filters"
|
||||
|
||||
# Test subscription filters
|
||||
print_step "Testing various subscription filters..."
|
||||
|
||||
# Test 1: Get all events
|
||||
test_subscription "test_all" '{}' "All events" "any"
|
||||
|
||||
# Test 2: Get events by kind
|
||||
test_subscription "test_kind1" '{"kinds":[1]}' "Kind 1 events only" "2"
|
||||
test_subscription "test_kind0" '{"kinds":[0]}' "Kind 0 events only" "any"
|
||||
|
||||
# Test 3: Get events by author (pubkey)
|
||||
local test_pubkey=$(echo "$regular1" | jq -r '.pubkey' 2>/dev/null)
|
||||
test_subscription "test_author" "{\"authors\":[\"$test_pubkey\"]}" "Events by specific author" "any"
|
||||
|
||||
# Test 4: Get recent events (time-based)
|
||||
local recent_timestamp=$(($(date +%s) - 200))
|
||||
test_subscription "test_recent" "{\"since\":$recent_timestamp}" "Recent events" "any"
|
||||
|
||||
# Test 5: Get events with specific tags
|
||||
test_subscription "test_tag_type" '{"#type":["regular"]}' "Events with type=regular tag" "any"
|
||||
|
||||
# Test 6: Multiple kinds
|
||||
test_subscription "test_multi_kinds" '{"kinds":[0,1]}' "Multiple kinds (0,1)" "any"
|
||||
|
||||
# Test 7: Limit results
|
||||
test_subscription "test_limit" '{"kinds":[1],"limit":1}' "Limited to 1 event" "1"
|
||||
|
||||
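    # Filter semantics exercised above (summary of NIP-01 filter behavior, not
    # relay-specific): "#type":["regular"] matches events carrying a tag whose
    # first element is "type" and whose value is "regular"; "since"/"until"
    # bound created_at; multiple values inside one field are ORed, while the
    # separate fields (kinds, authors, #tags, since, limit) are ANDed together.
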
print_header "PHASE 4: Database Verification"
|
||||
|
||||
# Check what's actually stored in the database
|
||||
print_step "Verifying database contents..."
|
||||
|
||||
if command -v sqlite3 &> /dev/null; then
|
||||
print_info "Events by type in database:"
|
||||
sqlite3 db/c_nostr_relay.db "SELECT event_type, COUNT(*) as count FROM events GROUP BY event_type;" | while read line; do
|
||||
echo " $line"
|
||||
done
|
||||
|
||||
print_info "Recent events in database:"
|
||||
sqlite3 db/c_nostr_relay.db "SELECT substr(id, 1, 16) || '...' as short_id, event_type, kind, substr(content, 1, 30) || '...' as short_content FROM events ORDER BY created_at DESC LIMIT 5;" | while read line; do
|
||||
echo " $line"
|
||||
done
|
||||
|
||||
print_success "Database verification complete"
|
||||
else
|
||||
print_warning "sqlite3 not available for database verification"
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
# Run the comprehensive test
|
||||
print_header "Starting C-Relay Comprehensive Test Suite with NIP-01 Validation"
|
||||
echo
|
||||
|
||||
if run_comprehensive_test; then
|
||||
echo
|
||||
print_success "All tests completed successfully!"
|
||||
print_info "The C-Relay with full NIP-01 validation is working correctly"
|
||||
print_info "✅ Event validation, signature verification, and error handling all working"
|
||||
echo
|
||||
exit 0
|
||||
else
|
||||
echo
|
||||
print_error "Some tests failed"
|
||||
exit 1
|
||||
fi
|
||||
386 tests/9_delete_test.sh Executable file
@@ -0,0 +1,386 @@
#!/bin/bash

# NIP-09 Event Deletion Request Test for C-Relay
# Tests deletion request functionality - assumes relay is already running
# Based on the pattern from 1_nip_test.sh

set -e

# Color constants
RED='\033[31m'
GREEN='\033[32m'
YELLOW='\033[33m'
BLUE='\033[34m'
BOLD='\033[1m'
RESET='\033[0m'

# Test configuration
RELAY_URL="ws://127.0.0.1:8888"
TEST_PRIVATE_KEY="nsec1j4c6269y9w0q2er2xjw8sv2ehyrtfxq3jwgdlxj6qfn8z4gjsq5qfvfk99"

# Print functions
print_header() {
    echo -e "${BLUE}${BOLD}=== $1 ===${RESET}"
}

print_step() {
    echo -e "${YELLOW}[STEP]${RESET} $1"
}

print_success() {
    echo -e "${GREEN}✓${RESET} $1"
}

print_error() {
    echo -e "${RED}✗${RESET} $1"
}

print_info() {
    echo -e "${BLUE}[INFO]${RESET} $1"
}

print_warning() {
    echo -e "${YELLOW}[WARNING]${RESET} $1"
}

# Helper function to publish event and extract ID
# Callers capture this function's stdout to get the event ID, so all status
# output goes to stderr (>&2) and only the ID is written to stdout.
publish_event() {
    local event_json="$1"
    local description="$2"

    # Extract event ID
    local event_id=$(echo "$event_json" | jq -r '.id' 2>/dev/null)
    if [[ "$event_id" == "null" || -z "$event_id" ]]; then
        print_error "Could not extract event ID from $description" >&2
        return 1
    fi

    print_info "Publishing $description..." >&2

    # Create EVENT message in Nostr format
    local event_message="[\"EVENT\",$event_json]"

    # Publish to relay
    local response=""
    if command -v websocat &> /dev/null; then
        response=$(echo "$event_message" | timeout 5s websocat "$RELAY_URL" 2>&1 || echo "Connection failed")
    else
        print_error "websocat not found - required for testing" >&2
        return 1
    fi

    # Check response
    if [[ "$response" == *"Connection failed"* ]]; then
        print_error "Failed to connect to relay for $description" >&2
        return 1
    elif [[ "$response" == *"true"* ]]; then
        print_success "$description uploaded (ID: ${event_id:0:16}...)" >&2
        echo "$event_id"
        return 0
    else
        print_warning "$description might have failed: $response" >&2
        echo ""
        return 1
    fi
}

# Helper function to publish deletion request
# As above, status output goes to stderr so callers can capture the event ID.
publish_deletion_request() {
    local deletion_event_json="$1"
    local description="$2"

    # Extract event ID
    local event_id=$(echo "$deletion_event_json" | jq -r '.id' 2>/dev/null)
    if [[ "$event_id" == "null" || -z "$event_id" ]]; then
        print_error "Could not extract event ID from $description" >&2
        return 1
    fi

    print_info "Publishing $description..." >&2

    # Create EVENT message in Nostr format
    local event_message="[\"EVENT\",$deletion_event_json]"

    # Publish to relay
    local response=""
    if command -v websocat &> /dev/null; then
        response=$(echo "$event_message" | timeout 5s websocat "$RELAY_URL" 2>&1 || echo "Connection failed")
    else
        print_error "websocat not found - required for testing" >&2
        return 1
    fi

    # Check response
    if [[ "$response" == *"Connection failed"* ]]; then
        print_error "Failed to connect to relay for $description" >&2
        return 1
    elif [[ "$response" == *"true"* ]]; then
        print_success "$description accepted (ID: ${event_id:0:16}...)" >&2
        echo "$event_id"
        return 0
    else
        print_warning "$description might have failed: $response" >&2
        echo ""
        return 1
    fi
}

# Helper function to check if event exists via subscription
check_event_exists() {
    local event_id="$1"
    local sub_id="exists_$(date +%s%N | cut -c1-10)"

    # Create REQ message to query for specific event ID
    local req_message="[\"REQ\",\"$sub_id\",{\"ids\":[\"$event_id\"]}]"

    # Send subscription and collect events
    local response=""
    if command -v websocat &> /dev/null; then
        response=$(echo -e "$req_message\n[\"CLOSE\",\"$sub_id\"]" | timeout 3s websocat "$RELAY_URL" 2>/dev/null || echo "")
    fi

    # Count EVENT responses (grep -c already prints 0 on no match, so only
    # suppress its non-zero exit status rather than echoing a second "0")
    local event_count=0
    if [[ -n "$response" ]]; then
        event_count=$(echo "$response" | grep -c "\"EVENT\"" 2>/dev/null || true)
        event_count=${event_count:-0}
    fi

    echo "$event_count"
}

# Helper function to query events by kind
query_events_by_kind() {
    local kind="$1"
    local sub_id="kind${kind}_$(date +%s%N | cut -c1-10)"

    # Create REQ message to query for events of specific kind
    local req_message="[\"REQ\",\"$sub_id\",{\"kinds\":[$kind]}]"

    # Send subscription and collect events
    local response=""
    if command -v websocat &> /dev/null; then
        response=$(echo -e "$req_message\n[\"CLOSE\",\"$sub_id\"]" | timeout 3s websocat "$RELAY_URL" 2>/dev/null || echo "")
    fi

    # Count EVENT responses
    local event_count=0
    if [[ -n "$response" ]]; then
        event_count=$(echo "$response" | grep -c "\"EVENT\"" 2>/dev/null || true)
        event_count=${event_count:-0}
    fi

    echo "$event_count"
}

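# Usage sketch for the helpers above (illustrative only, not part of the test flow):
#   id=$(publish_event "$(nak event --sec "$TEST_PRIVATE_KEY" -c "hello" -k 1)" "demo note")
#   check_event_exists "$id"     # prints 1 while the event is stored, 0 once deleted
#   query_events_by_kind 5       # prints how many kind-5 deletion requests the relay returns
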
# Main test function
run_deletion_test() {
    print_header "NIP-09 Event Deletion Request Test"

    # Check dependencies
    print_step "Checking dependencies..."
    if ! command -v nak &> /dev/null; then
        print_error "nak command not found"
        print_info "Please install nak: go install github.com/fiatjaf/nak@latest"
        return 1
    fi
    if ! command -v websocat &> /dev/null; then
        print_error "websocat command not found"
        print_info "Please install websocat for testing"
        return 1
    fi
    if ! command -v jq &> /dev/null; then
        print_error "jq command not found"
        print_info "Please install jq for JSON processing"
        return 1
    fi
    print_success "All dependencies found"

    print_header "PHASE 1: Publishing Events to be Deleted"

    # Create test events that will be deleted
    print_step "Creating events for deletion testing..."

    # Create regular events (kind 1) - these will be deleted by ID
    local event1=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Event to be deleted #1" -k 1 --ts $(($(date +%s) - 100)) -t "type=test" -t "phase=deletion" 2>/dev/null)
    local event2=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Event to be deleted #2" -k 1 --ts $(($(date +%s) - 90)) -t "type=test" -t "phase=deletion" 2>/dev/null)

    # Publish the events
    event1_id=$(publish_event "$event1" "Event to be deleted #1")
    if [[ -z "$event1_id" ]]; then
        print_error "Failed to publish test event #1"
        return 1
    fi

    event2_id=$(publish_event "$event2" "Event to be deleted #2")
    if [[ -z "$event2_id" ]]; then
        print_error "Failed to publish test event #2"
        return 1
    fi

    # Create an addressable event (kind 30001) - will be deleted by address
    local addr_event=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Addressable event to be deleted" -k 30001 --ts $(($(date +%s) - 80)) -t "d=test-delete" -t "type=addressable" 2>/dev/null)

    addr_event_id=$(publish_event "$addr_event" "Addressable event to be deleted")
    if [[ -z "$addr_event_id" ]]; then
        print_error "Failed to publish addressable test event"
        return 1
    fi

    # Create an event by a different author (to test unauthorized deletion)
    local different_key="nsec1234567890abcdef1234567890abcdef1234567890abcdef1234567890ab"
    local unauth_event=$(nak event --sec "$different_key" -c "Event by different author" -k 1 --ts $(($(date +%s) - 70)) -t "type=unauthorized" 2>/dev/null)

    # "|| true" keeps set -e from aborting here: this key is intentionally unusable
    unauth_event_id=$(publish_event "$unauth_event" "Event by different author") || true
    if [[ -z "$unauth_event_id" ]]; then
        print_warning "Failed to publish unauthorized test event - continuing anyway"
    fi

    # Let events settle
    sleep 2

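    # For reference, the deletion request built below is a kind-5 event whose
    # "e" tags list the event IDs to delete and whose "k" tag names their kind
    # (NIP-09). Illustrative shape with placeholder values:
    #   {"kind":5,
    #    "tags":[["e","<event1 id>"],["e","<event2 id>"],["k","1"]],
    #    "content":"Deleting events by ID", ...}
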
print_header "PHASE 2: Testing Event Deletion by ID"
|
||||
|
||||
print_step "Verifying events exist before deletion..."
|
||||
local event1_before=$(check_event_exists "$event1_id")
|
||||
local event2_before=$(check_event_exists "$event2_id")
|
||||
print_info "Event1 exists: $event1_before, Event2 exists: $event2_before"
|
||||
|
||||
# Create deletion request targeting the two events by ID
|
||||
print_step "Creating deletion request for events by ID..."
|
||||
local deletion_by_id=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Deleting events by ID" -k 5 --ts $(date +%s) -e "$event1_id" -e "$event2_id" -t "k=1" 2>/dev/null)
|
||||
|
||||
deletion_id=$(publish_deletion_request "$deletion_by_id" "Deletion request for events by ID")
|
||||
if [[ -z "$deletion_id" ]]; then
|
||||
print_error "Failed to publish deletion request"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Wait for deletion to process
|
||||
sleep 3
|
||||
|
||||
# Check if events were deleted
|
||||
print_step "Verifying events were deleted..."
|
||||
local event1_after=$(check_event_exists "$event1_id")
|
||||
local event2_after=$(check_event_exists "$event2_id")
|
||||
print_info "Event1 exists after deletion: $event1_after, Event2 exists after deletion: $event2_after"
|
||||
|
||||
if [[ "$event1_after" == "0" && "$event2_after" == "0" ]]; then
|
||||
print_success "✓ Events successfully deleted by ID"
|
||||
else
|
||||
print_error "✗ Events were not properly deleted"
|
||||
fi
|
||||
|
||||
print_header "PHASE 3: Testing Address-based Deletion"
|
||||
|
||||
if [[ -n "$addr_event_id" ]]; then
|
||||
print_step "Verifying addressable event exists before deletion..."
|
||||
local addr_before=$(check_event_exists "$addr_event_id")
|
||||
print_info "Addressable event exists: $addr_before"
|
||||
|
||||
# Create deletion request for addressable event using 'a' tag
|
||||
print_step "Creating deletion request for addressable event..."
|
||||
local test_pubkey=$(echo "$addr_event" | jq -r '.pubkey' 2>/dev/null)
|
||||
local deletion_by_addr=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Deleting addressable event" -k 5 --ts $(date +%s) -t "a=30001:${test_pubkey}:test-delete" -t "k=30001" 2>/dev/null)
|
||||
|
||||
addr_deletion_id=$(publish_deletion_request "$deletion_by_addr" "Deletion request for addressable event")
|
||||
if [[ -n "$addr_deletion_id" ]]; then
|
||||
# Wait for deletion to process
|
||||
sleep 3
|
||||
|
||||
# Check if addressable event was deleted
|
||||
print_step "Verifying addressable event was deleted..."
|
||||
local addr_after=$(check_event_exists "$addr_event_id")
|
||||
print_info "Addressable event exists after deletion: $addr_after"
|
||||
|
||||
if [[ "$addr_after" == "0" ]]; then
|
||||
print_success "✓ Addressable event successfully deleted"
|
||||
else
|
||||
print_error "✗ Addressable event was not properly deleted"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
print_header "PHASE 4: Testing Unauthorized Deletion"
|
||||
|
||||
if [[ -n "$unauth_event_id" ]]; then
|
||||
print_step "Testing unauthorized deletion attempt..."
|
||||
|
||||
# Try to delete the unauthorized event (should fail)
|
||||
local unauth_deletion=$(nak event --sec "$TEST_PRIVATE_KEY" -c "Attempting unauthorized deletion" -k 5 --ts $(date +%s) -e "$unauth_event_id" -t "k=1" 2>/dev/null)
|
||||
|
||||
unauth_deletion_id=$(publish_deletion_request "$unauth_deletion" "Unauthorized deletion request")
|
||||
if [[ -n "$unauth_deletion_id" ]]; then
|
||||
# Wait for processing
|
||||
sleep 3
|
||||
|
||||
# Check if unauthorized event still exists (should still exist)
|
||||
local unauth_after=$(check_event_exists "$unauth_event_id")
|
||||
print_info "Unauthorized event exists after deletion attempt: $unauth_after"
|
||||
|
||||
if [[ "$unauth_after" == "1" ]]; then
|
||||
print_success "✓ Unauthorized deletion properly rejected - event still exists"
|
||||
else
|
||||
print_error "✗ Unauthorized deletion succeeded - security vulnerability!"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
print_header "PHASE 5: Testing Invalid Deletion Requests"
|
||||
|
||||
print_step "Testing deletion request with no targets..."
|
||||
|
||||
# Create deletion request with no 'e' or 'a' tags (should be rejected)
|
||||
local invalid_deletion='{"id":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef","pubkey":"aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4","created_at":'$(date +%s)',"kind":5,"tags":[["k","1"]],"content":"Invalid deletion request with no targets","sig":"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"}'
|
||||
|
||||
# Create EVENT message in Nostr format
|
||||
local invalid_message="[\"EVENT\",$invalid_deletion]"
|
||||
|
||||
# Publish to relay
|
||||
local invalid_response=""
|
||||
if command -v websocat &> /dev/null; then
|
||||
invalid_response=$(echo "$invalid_message" | timeout 5s websocat "$RELAY_URL" 2>&1 || echo "Connection failed")
|
||||
fi
|
||||
|
||||
# Check response - should be rejected
|
||||
if [[ "$invalid_response" == *"false"* ]]; then
|
||||
print_success "✓ Invalid deletion request properly rejected"
|
||||
elif [[ "$invalid_response" == *"true"* ]]; then
|
||||
print_warning "⚠ Invalid deletion request was accepted (should have been rejected)"
|
||||
else
|
||||
print_info "Invalid deletion request response: $invalid_response"
|
||||
fi
|
||||
|
||||
print_header "PHASE 6: Verification"
|
||||
|
||||
# Verify deletion requests themselves are stored
|
||||
print_step "Verifying deletion requests are stored..."
|
||||
local deletion_count=$(query_events_by_kind 5)
|
||||
print_info "Deletion requests accessible via query: $deletion_count"
|
||||
|
||||
if [[ "$deletion_count" -gt 0 ]]; then
|
||||
print_success "✓ Deletion requests properly stored and queryable"
|
||||
else
|
||||
print_warning "⚠ No deletion requests found via query"
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
# Run the test
|
||||
print_header "Starting NIP-09 Event Deletion Request Test Suite"
|
||||
echo
|
||||
|
||||
if run_deletion_test; then
|
||||
echo
|
||||
print_success "All NIP-09 deletion tests completed successfully!"
|
||||
print_info "The C-Relay NIP-09 implementation is working correctly"
|
||||
print_info "✅ Event deletion by ID working"
|
||||
print_info "✅ Address-based deletion working"
|
||||
print_info "✅ Authorization validation working"
|
||||
print_info "✅ Invalid deletion rejection working"
|
||||
echo
|
||||
exit 0
|
||||
else
|
||||
echo
|
||||
print_error "Some NIP-09 tests failed"
|
||||
exit 1
|
||||
fi
|
||||
199 tests/subscribe_all.sh Executable file
@@ -0,0 +1,199 @@
#!/bin/bash

# Persistent Subscription Test Script
# Subscribes to all events in the relay and prints them as they arrive in real-time
# This tests the persistent subscription functionality of the C-Relay

set -e # Exit on any error

# Color constants
RED='\033[31m'
GREEN='\033[32m'
YELLOW='\033[33m'
BLUE='\033[34m'
BOLD='\033[1m'
RESET='\033[0m'

# Test configuration
RELAY_URL="ws://127.0.0.1:8888"
SUBSCRIPTION_ID="persistent_test_$(date +%s)"

# Print functions
print_header() {
    echo -e "${BLUE}${BOLD}=== $1 ===${RESET}"
}

print_info() {
    echo -e "${BLUE}[INFO]${RESET} $1"
}

print_success() {
    echo -e "${GREEN}✓${RESET} $1"
}

print_error() {
    echo -e "${RED}✗${RESET} $1"
}

print_warning() {
    echo -e "${YELLOW}[WARNING]${RESET} $1"
}

print_event() {
    echo -e "${GREEN}[EVENT]${RESET} $1"
}

# Cleanup function
cleanup() {
    print_info "Cleaning up..."
    if [[ -n "$WEBSOCAT_PID" ]]; then
        kill "$WEBSOCAT_PID" 2>/dev/null || true
        wait "$WEBSOCAT_PID" 2>/dev/null || true
    fi

    # Send CLOSE message to clean up subscription on relay
    if command -v websocat &> /dev/null; then
        echo "[\"CLOSE\",\"$SUBSCRIPTION_ID\"]" | timeout 2s websocat "$RELAY_URL" 2>/dev/null || true
    fi

    print_info "Cleanup complete"
    exit 0
}

# Set up signal handlers
trap cleanup SIGINT SIGTERM

# Parse events from relay responses
parse_events() {
    while IFS= read -r line; do
        # Check if this is an EVENT message
        if echo "$line" | jq -e '. | type == "array" and length >= 3 and .[0] == "EVENT"' >/dev/null 2>&1; then
            # Extract event details
            local event_id=$(echo "$line" | jq -r '.[2].id' 2>/dev/null || echo "unknown")
            local event_kind=$(echo "$line" | jq -r '.[2].kind' 2>/dev/null || echo "unknown")
            local event_content=$(echo "$line" | jq -r '.[2].content' 2>/dev/null || echo "")
            local event_pubkey=$(echo "$line" | jq -r '.[2].pubkey' 2>/dev/null || echo "unknown")
            local event_created_at=$(echo "$line" | jq -r '.[2].created_at' 2>/dev/null || echo "unknown")
            local event_tags=$(echo "$line" | jq -r '.[2].tags | length' 2>/dev/null || echo "0")

            # Convert timestamp to readable format
            local readable_time="unknown"
            if [[ "$event_created_at" != "unknown" && "$event_created_at" =~ ^[0-9]+$ ]]; then
                readable_time=$(date -d "@$event_created_at" "+%Y-%m-%d %H:%M:%S" 2>/dev/null || echo "$event_created_at")
            fi

            # Print formatted event
            print_event "Kind: $event_kind | ID: ${event_id:0:16}... | Author: ${event_pubkey:0:16}..."
            echo -e "  ${YELLOW}Time:${RESET} $readable_time | ${YELLOW}Tags:${RESET} $event_tags"

            # Show content (truncated if too long)
            if [[ -n "$event_content" ]]; then
                local truncated_content="${event_content:0:100}"
                if [[ ${#event_content} -gt 100 ]]; then
                    truncated_content="${truncated_content}..."
                fi
                echo -e "  ${YELLOW}Content:${RESET} $truncated_content"
            fi
            echo # Blank line for readability

        elif echo "$line" | jq -e '. | type == "array" and length >= 2 and .[0] == "EOSE"' >/dev/null 2>&1; then
            # End of stored events
            local sub_id=$(echo "$line" | jq -r '.[1]' 2>/dev/null)
            print_info "End of stored events for subscription: $sub_id"
            print_success "Persistent subscription is now active - waiting for new events..."
            echo

        elif echo "$line" | jq -e '. | type == "array" and length >= 3 and .[0] == "CLOSED"' >/dev/null 2>&1; then
            # Subscription closed
            local sub_id=$(echo "$line" | jq -r '.[1]' 2>/dev/null)
            local reason=$(echo "$line" | jq -r '.[2]' 2>/dev/null)
            print_warning "Subscription $sub_id was closed: $reason"

        elif echo "$line" | jq -e '. | type == "array" and length >= 4 and .[0] == "OK"' >/dev/null 2>&1; then
            # OK response to event publishing
            local event_id=$(echo "$line" | jq -r '.[1]' 2>/dev/null)
            local success=$(echo "$line" | jq -r '.[2]' 2>/dev/null)
            local message=$(echo "$line" | jq -r '.[3]' 2>/dev/null)
            if [[ "$success" == "true" ]]; then
                print_success "Event published: ${event_id:0:16}..."
            else
                print_error "Event publish failed: ${event_id:0:16}... - $message"
            fi

        else
            # Unknown message type - just show it
            print_info "Relay message: $line"
        fi
    done
}

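# Examples of the relay messages parse_events handles, one JSON array per line
# (standard NIP-01 message types; values are placeholders):
#   ["EVENT","persistent_test_1757082309",{"id":"...","kind":1,"content":"hello",...}]
#   ["EOSE","persistent_test_1757082309"]
#   ["CLOSED","persistent_test_1757082309","<human-readable reason>"]
#   ["OK","<event id>",true,""]
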
# Main function
main() {
    print_header "Persistent Subscription Test - Subscribe to All Events"

    # Check dependencies
    if ! command -v websocat &> /dev/null; then
        print_error "websocat command not found"
        print_info "Please install websocat for testing"
        return 1
    fi
    if ! command -v jq &> /dev/null; then
        print_error "jq command not found"
        print_info "Please install jq for JSON processing"
        return 1
    fi

    print_info "Subscription ID: $SUBSCRIPTION_ID"
    print_info "Relay URL: $RELAY_URL"
    print_info "Filter: {} (all events)"
    echo

    # Create REQ message to subscribe to all events
    local req_message="[\"REQ\",\"$SUBSCRIPTION_ID\",{}]"

    print_info "Establishing persistent subscription..."
    print_info "Press Ctrl+C to stop and cleanup"
    echo

    # Start websocat connection and keep it open
    {
        echo "$req_message"
        # Keep the connection alive by sleeping indefinitely
        # The connection will receive events as they come in
        while true; do
            sleep 1
        done
    } | websocat "$RELAY_URL" | parse_events &

    # Store the background process ID
    WEBSOCAT_PID=$!

    # Wait for the background process (which runs indefinitely)
    # This will exit when we get a signal (Ctrl+C)
    wait "$WEBSOCAT_PID" 2>/dev/null || true
}

# Usage information
usage() {
    echo "Usage: $0"
    echo
    echo "This script creates a persistent subscription to all events on the relay"
    echo "and displays them in real-time as they arrive. Perfect for testing"
    echo "the persistent subscription functionality."
    echo
    echo "To test:"
    echo "1. Run this script in one terminal"
    echo "2. Run 'tests/1_nip_test.sh' in another terminal"
    echo "3. Watch events appear in real-time in this terminal"
    echo
    echo "Press Ctrl+C to stop and cleanup the subscription."
}

# Handle help flag
if [[ "$1" == "-h" || "$1" == "--help" ]]; then
    usage
    exit 0
fi

# Run main function
main "$@"