Compare commits
62 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f272264960 | ||
|
|
cb3171b390 | ||
|
|
03f036d60d | ||
|
|
9b35f463ae | ||
|
|
7b74486519 | ||
|
|
b276b44ded | ||
|
|
3792649ed9 | ||
|
|
5f08956605 | ||
|
|
643d89ed7b | ||
|
|
8ca459593c | ||
|
|
ee4208cc19 | ||
|
|
f6330e4bb8 | ||
|
|
4f3cf10a5c | ||
|
|
aa1954e81e | ||
|
|
482597bd0e | ||
|
|
d4b90e681c | ||
|
|
fcf9e43c4c | ||
|
|
b8d8cd19d3 | ||
|
|
536c2d966c | ||
|
|
f49cb0a5ac | ||
|
|
cef6bb2340 | ||
|
|
4c03253b30 | ||
|
|
ed09bb7370 | ||
|
|
5c46a25173 | ||
|
|
d1538f00df | ||
|
|
afa4acbbfb | ||
|
|
d9a530485f | ||
|
|
b2ad70b028 | ||
|
|
f49aae8ab0 | ||
|
|
f6debcf799 | ||
|
|
edbc4f1359 | ||
|
|
5242f066e7 | ||
|
|
af186800fa | ||
|
|
2bff4a5f44 | ||
|
|
edb73d50cf | ||
|
|
3dc09d55fd | ||
|
|
079fb1b0f5 | ||
|
|
17b2aa8111 | ||
|
|
78d484cfe0 | ||
|
|
182e12817d | ||
|
|
9179d57cc9 | ||
|
|
9cb9b746d8 | ||
|
|
57a0089664 | ||
|
|
53f7608872 | ||
|
|
838ce5b45a | ||
|
|
e878b9557e | ||
|
|
6638d37d6f | ||
|
|
4c29e15329 | ||
|
|
48890a2121 | ||
|
|
e312d7e18c | ||
|
|
6c38aaebf3 | ||
|
|
18b0ac44bf | ||
|
|
b6749eff2f | ||
|
|
c73a103280 | ||
|
|
a5d194f730 | ||
|
|
6320436b88 | ||
|
|
87325927ed | ||
|
|
4435cdf5b6 | ||
|
|
b041654611 | ||
|
|
e833dcefd4 | ||
|
|
29680f0ee8 | ||
|
|
670329700c |
1
.gitignore
vendored
@@ -11,4 +11,3 @@ copy_executable_local.sh
|
||||
nostr_login_lite/
|
||||
style_guide/
|
||||
nostr-tools
|
||||
|
||||
|
||||
6
.gitmodules
vendored
@@ -1,3 +1,9 @@
|
||||
[submodule "nostr_core_lib"]
|
||||
path = nostr_core_lib
|
||||
url = https://git.laantungir.net/laantungir/nostr_core_lib.git
|
||||
[submodule "c_utils_lib"]
|
||||
path = c_utils_lib
|
||||
url = ssh://git@git.laantungir.net:2222/laantungir/c_utils_lib.git
|
||||
[submodule "text_graph"]
|
||||
path = text_graph
|
||||
url = ssh://git@git.laantungir.net:2222/laantungir/text_graph.git
|
||||
|
||||
@@ -2,6 +2,6 @@
|
||||
description: "Brief description of what this command does"
|
||||
---
|
||||
|
||||
Run build_and_push.sh, and supply a good git commit message. For example:
|
||||
Run increment_and_push.sh, and supply a good git commit message. For example:
|
||||
|
||||
./build_and_push.sh "Fixed the bug with nip05 implementation"
|
||||
./increment_and_push.sh "Fixed the bug with nip05 implementation"
|
||||
@@ -1 +1 @@
|
||||
src/embedded_web_content.c
|
||||
src/embedded_web_content.c
|
||||
|
||||
@@ -121,8 +121,8 @@ fuser -k 8888/tcp
|
||||
- Event filtering done at C level, not SQL level for NIP-40 expiration
|
||||
|
||||
### Configuration Override Behavior
|
||||
- CLI port override only affects first-time startup
|
||||
- After database creation, all config comes from events
|
||||
- CLI port override applies during first-time startup and existing relay restarts
|
||||
- After database creation, all config comes from events (but CLI overrides can still be applied)
|
||||
- Database path cannot be changed after initialization
|
||||
|
||||
## Non-Obvious Pitfalls
|
||||
|
||||
@@ -1,8 +1,13 @@
|
||||
# Alpine-based MUSL static binary builder for C-Relay
|
||||
# Produces truly portable binaries with zero runtime dependencies
|
||||
|
||||
ARG DEBUG_BUILD=false
|
||||
|
||||
FROM alpine:3.19 AS builder
|
||||
|
||||
# Re-declare build argument in this stage
|
||||
ARG DEBUG_BUILD=false
|
||||
|
||||
# Install build dependencies
|
||||
RUN apk add --no-cache \
|
||||
build-base \
|
||||
@@ -76,6 +81,15 @@ RUN git submodule update --init --recursive
|
||||
# Copy nostr_core_lib source files (cached unless nostr_core_lib changes)
|
||||
COPY nostr_core_lib /build/nostr_core_lib/
|
||||
|
||||
# Copy c_utils_lib source files (cached unless c_utils_lib changes)
|
||||
COPY c_utils_lib /build/c_utils_lib/
|
||||
|
||||
# Build c_utils_lib with MUSL-compatible flags (cached unless c_utils_lib changes)
|
||||
RUN cd c_utils_lib && \
|
||||
sed -i 's/CFLAGS = -Wall -Wextra -std=c99 -O2 -g/CFLAGS = -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0 -Wall -Wextra -std=c99 -O2 -g/' Makefile && \
|
||||
make clean && \
|
||||
make
|
||||
|
||||
# Build nostr_core_lib with required NIPs (cached unless nostr_core_lib changes)
|
||||
# Disable fortification in build.sh to prevent __*_chk symbol issues
|
||||
# NIPs: 001(Basic), 006(Keys), 013(PoW), 017(DMs), 019(Bech32), 044(Encryption), 059(Gift Wrap - required by NIP-17)
|
||||
@@ -91,20 +105,29 @@ COPY Makefile /build/Makefile
|
||||
|
||||
# Build c-relay with full static linking (only rebuilds when src/ changes)
|
||||
# Disable fortification to avoid __*_chk symbols that don't exist in MUSL
|
||||
RUN gcc -static -O2 -Wall -Wextra -std=c99 \
|
||||
# Use conditional compilation flags based on DEBUG_BUILD argument
|
||||
RUN if [ "$DEBUG_BUILD" = "true" ]; then \
|
||||
CFLAGS="-g -O0 -DDEBUG"; \
|
||||
STRIP_CMD=""; \
|
||||
echo "Building with DEBUG symbols enabled"; \
|
||||
else \
|
||||
CFLAGS="-O2"; \
|
||||
STRIP_CMD="strip /build/c_relay_static"; \
|
||||
echo "Building optimized production binary"; \
|
||||
fi && \
|
||||
gcc -static $CFLAGS -Wall -Wextra -std=c99 \
|
||||
-U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0 \
|
||||
-I. -Inostr_core_lib -Inostr_core_lib/nostr_core \
|
||||
-I. -Ic_utils_lib/src -Inostr_core_lib -Inostr_core_lib/nostr_core \
|
||||
-Inostr_core_lib/cjson -Inostr_core_lib/nostr_websocket \
|
||||
src/main.c src/config.c src/debug.c src/dm_admin.c src/request_validator.c \
|
||||
src/main.c src/config.c src/dm_admin.c src/request_validator.c \
|
||||
src/nip009.c src/nip011.c src/nip013.c src/nip040.c src/nip042.c \
|
||||
src/websockets.c src/subscriptions.c src/api.c src/embedded_web_content.c \
|
||||
-o /build/c_relay_static \
|
||||
c_utils_lib/libc_utils.a \
|
||||
nostr_core_lib/libnostr_core_x64.a \
|
||||
-lwebsockets -lssl -lcrypto -lsqlite3 -lsecp256k1 \
|
||||
-lcurl -lz -lpthread -lm -ldl
|
||||
|
||||
# Strip binary to reduce size
|
||||
RUN strip /build/c_relay_static
|
||||
-lcurl -lz -lpthread -lm -ldl && \
|
||||
eval "$STRIP_CMD"
|
||||
|
||||
# Verify it's truly static
|
||||
RUN echo "=== Binary Information ===" && \
|
||||
|
||||
27
Makefile
@@ -2,15 +2,16 @@
|
||||
|
||||
CC = gcc
|
||||
CFLAGS = -Wall -Wextra -std=c99 -g -O2
|
||||
INCLUDES = -I. -Inostr_core_lib -Inostr_core_lib/nostr_core -Inostr_core_lib/cjson -Inostr_core_lib/nostr_websocket
|
||||
LIBS = -lsqlite3 -lwebsockets -lz -ldl -lpthread -lm -L/usr/local/lib -lsecp256k1 -lssl -lcrypto -L/usr/local/lib -lcurl
|
||||
INCLUDES = -I. -Ic_utils_lib/src -Inostr_core_lib -Inostr_core_lib/nostr_core -Inostr_core_lib/cjson -Inostr_core_lib/nostr_websocket
|
||||
LIBS = -lsqlite3 -lwebsockets -lz -ldl -lpthread -lm -L/usr/local/lib -lsecp256k1 -lssl -lcrypto -L/usr/local/lib -lcurl -Lc_utils_lib -lc_utils
|
||||
|
||||
# Build directory
|
||||
BUILD_DIR = build
|
||||
|
||||
# Source files
|
||||
MAIN_SRC = src/main.c src/config.c src/debug.c src/dm_admin.c src/request_validator.c src/nip009.c src/nip011.c src/nip013.c src/nip040.c src/nip042.c src/websockets.c src/subscriptions.c src/api.c src/embedded_web_content.c
|
||||
MAIN_SRC = src/main.c src/config.c src/dm_admin.c src/request_validator.c src/nip009.c src/nip011.c src/nip013.c src/nip040.c src/nip042.c src/websockets.c src/subscriptions.c src/api.c src/embedded_web_content.c
|
||||
NOSTR_CORE_LIB = nostr_core_lib/libnostr_core_x64.a
|
||||
C_UTILS_LIB = c_utils_lib/libc_utils.a
|
||||
|
||||
# Architecture detection
|
||||
ARCH = $(shell uname -m)
|
||||
@@ -38,6 +39,11 @@ $(NOSTR_CORE_LIB):
|
||||
@echo "Building nostr_core_lib with required NIPs (including NIP-44 for encryption)..."
|
||||
cd nostr_core_lib && ./build.sh --nips=1,6,13,17,19,44,59
|
||||
|
||||
# Check if c_utils_lib is built
|
||||
$(C_UTILS_LIB):
|
||||
@echo "Building c_utils_lib..."
|
||||
cd c_utils_lib && ./build.sh lib
|
||||
|
||||
# Update main.h version information (requires main.h to exist)
|
||||
src/main.h:
|
||||
@if [ ! -f src/main.h ]; then \
|
||||
@@ -75,18 +81,18 @@ force-version:
|
||||
@$(MAKE) src/main.h
|
||||
|
||||
# Build the relay
|
||||
$(TARGET): $(BUILD_DIR) src/main.h src/sql_schema.h $(MAIN_SRC) $(NOSTR_CORE_LIB)
|
||||
$(TARGET): $(BUILD_DIR) src/main.h src/sql_schema.h $(MAIN_SRC) $(NOSTR_CORE_LIB) $(C_UTILS_LIB)
|
||||
@echo "Compiling C-Relay for architecture: $(ARCH)"
|
||||
$(CC) $(CFLAGS) $(INCLUDES) $(MAIN_SRC) -o $(TARGET) $(NOSTR_CORE_LIB) $(LIBS)
|
||||
$(CC) $(CFLAGS) $(INCLUDES) $(MAIN_SRC) -o $(TARGET) $(NOSTR_CORE_LIB) $(C_UTILS_LIB) $(LIBS)
|
||||
@echo "Build complete: $(TARGET)"
|
||||
|
||||
# Build for specific architectures
|
||||
x86: $(BUILD_DIR) src/main.h src/sql_schema.h $(MAIN_SRC) $(NOSTR_CORE_LIB)
|
||||
x86: $(BUILD_DIR) src/main.h src/sql_schema.h $(MAIN_SRC) $(NOSTR_CORE_LIB) $(C_UTILS_LIB)
|
||||
@echo "Building C-Relay for x86_64..."
|
||||
$(CC) $(CFLAGS) $(INCLUDES) $(MAIN_SRC) -o $(BUILD_DIR)/c_relay_x86 $(NOSTR_CORE_LIB) $(LIBS)
|
||||
$(CC) $(CFLAGS) $(INCLUDES) $(MAIN_SRC) -o $(BUILD_DIR)/c_relay_x86 $(NOSTR_CORE_LIB) $(C_UTILS_LIB) $(LIBS)
|
||||
@echo "Build complete: $(BUILD_DIR)/c_relay_x86"
|
||||
|
||||
arm64: $(BUILD_DIR) src/main.h src/sql_schema.h $(MAIN_SRC) $(NOSTR_CORE_LIB)
|
||||
arm64: $(BUILD_DIR) src/main.h src/sql_schema.h $(MAIN_SRC) $(NOSTR_CORE_LIB) $(C_UTILS_LIB)
|
||||
@echo "Cross-compiling C-Relay for ARM64..."
|
||||
@if ! command -v aarch64-linux-gnu-gcc >/dev/null 2>&1; then \
|
||||
echo "ERROR: ARM64 cross-compiler not found."; \
|
||||
@@ -110,7 +116,7 @@ arm64: $(BUILD_DIR) src/main.h src/sql_schema.h $(MAIN_SRC) $(NOSTR_CORE_LIB)
|
||||
fi
|
||||
@echo "Using aarch64-linux-gnu-gcc with ARM64 libraries..."
|
||||
PKG_CONFIG_PATH=/usr/lib/aarch64-linux-gnu/pkgconfig:/usr/share/pkgconfig \
|
||||
aarch64-linux-gnu-gcc $(CFLAGS) $(INCLUDES) $(MAIN_SRC) -o $(BUILD_DIR)/c_relay_arm64 $(NOSTR_CORE_LIB) \
|
||||
aarch64-linux-gnu-gcc $(CFLAGS) $(INCLUDES) $(MAIN_SRC) -o $(BUILD_DIR)/c_relay_arm64 $(NOSTR_CORE_LIB) $(C_UTILS_LIB) \
|
||||
-L/usr/lib/aarch64-linux-gnu $(LIBS)
|
||||
@echo "Build complete: $(BUILD_DIR)/c_relay_arm64"
|
||||
|
||||
@@ -161,9 +167,10 @@ clean:
|
||||
rm -rf $(BUILD_DIR)
|
||||
@echo "Clean complete"
|
||||
|
||||
# Clean everything including nostr_core_lib
|
||||
# Clean everything including nostr_core_lib and c_utils_lib
|
||||
clean-all: clean
|
||||
cd nostr_core_lib && make clean 2>/dev/null || true
|
||||
cd c_utils_lib && make clean 2>/dev/null || true
|
||||
|
||||
# Install dependencies (Ubuntu/Debian)
|
||||
install-deps:
|
||||
|
||||
65
NOSTR_RELEASE.md
Normal file
@@ -0,0 +1,65 @@
|
||||
# Relay
|
||||
|
||||
I am releasing the code for the nostr relay that I wrote to use myself. The code is free for anyone to use in any way that they wish.
|
||||
|
||||
Some of the features of this relay are conventional, and some are unconventional.
|
||||
|
||||
## The conventional
|
||||
|
||||
This relay is written in C99 with a sqlite database.
|
||||
|
||||
It implements the following NIPs.
|
||||
|
||||
- [x] NIP-01: Basic protocol flow implementation
|
||||
- [x] NIP-09: Event deletion
|
||||
- [x] NIP-11: Relay information document
|
||||
- [x] NIP-13: Proof of Work
|
||||
- [x] NIP-15: End of Stored Events Notice
|
||||
- [x] NIP-20: Command Results
|
||||
- [x] NIP-33: Parameterized Replaceable Events
|
||||
- [x] NIP-40: Expiration Timestamp
|
||||
- [x] NIP-42: Authentication of clients to relays
|
||||
- [x] NIP-45: Counting results
|
||||
- [x] NIP-50: Keywords filter
|
||||
- [x] NIP-70: Protected Events
|
||||
|
||||
## The unconventional
|
||||
|
||||
### The binaries are fully self contained.
|
||||
|
||||
It should just run on Linux without your having to worry about what is already on your system. Just download and run. No Docker. No dependency hell.
|
||||
|
||||
I'm not bothering with other operating systems.
|
||||
|
||||
### The relay is a full nostr citizen with its own public and private keys.
|
||||
|
||||
For example, you can see my relay's profile (wss://relay.laantungir.net) running here:
|
||||
|
||||
[Primal link](https://primal.net/p/nprofile1qqswn2jsmm8lq8evas0v9vhqkdpn9nuujt90mtz60nqgsxndy66es4qjjnhr7)
|
||||
|
||||
What this means in practice is that when you start the relay, it generates keys for itself and for its administrator (you can specify these if you wish).
|
||||
|
||||
Now the program and the administrator can have verified communication between the two, simply by using nostr events. For example, the administrator can send DMs to the relay, asking its status and changing its configuration through any client that can handle NIP-17 DMs. The relay can also send notifications to the administrator about its current status, or it can publish its status on a regular schedule directly to Nostr as kind-1 notes.
|
||||
|
||||
Also included is a more standard administrative web front end. This front end communicates to the relay using an extensive api, which again is nostr events signed by the administrator. You can completely control the relay through signed nostr events.
|
||||
|
||||
## Screenshots
|
||||
|
||||

|
||||
Main page with real time updates.
|
||||
|
||||

|
||||
Set your configuration preferences.
|
||||
|
||||

|
||||
View current subscriptions
|
||||
|
||||

|
||||
Add npubs to white or black lists.
|
||||
|
||||

|
||||
Run sql queries on the database.
|
||||
|
||||

|
||||
Light mode.
|
||||
|
||||
127
README.md
@@ -164,6 +164,8 @@ All commands are sent as NIP-44 encrypted JSON arrays in the event content. The
|
||||
| `system_clear_auth` | `["system_command", "clear_all_auth_rules"]` | Clear all auth rules |
|
||||
| `system_status` | `["system_command", "system_status"]` | Get system status |
|
||||
| `stats_query` | `["stats_query"]` | Get comprehensive database statistics |
|
||||
| **Database Queries** |
|
||||
| `sql_query` | `["sql_query", "SELECT * FROM events LIMIT 10"]` | Execute read-only SQL query against relay database |
|
||||
|
||||
### Available Configuration Keys
|
||||
|
||||
@@ -193,6 +195,9 @@ All commands are sent as NIP-44 encrypted JSON arrays in the event content. The
|
||||
- `pow_min_difficulty`: Minimum proof-of-work difficulty
|
||||
- `nip40_expiration_enabled`: Enable event expiration (`true`/`false`)
|
||||
|
||||
**Monitoring Settings:**
|
||||
- `kind_24567_reporting_throttle_sec`: Minimum seconds between monitoring events (default: 5)
|
||||
|
||||
### Dynamic Configuration Updates
|
||||
|
||||
C-Relay supports **dynamic configuration updates** without requiring a restart for most settings. Configuration parameters are categorized as either **dynamic** (can be updated immediately) or **restart-required** (require relay restart to take effect).
|
||||
@@ -320,8 +325,68 @@ All admin commands return **signed EVENT responses** via WebSocket following sta
|
||||
],
|
||||
"sig": "response_event_signature"
|
||||
}]
|
||||
```
|
||||
|
||||
**SQL Query Response:**
|
||||
```json
|
||||
["EVENT", "temp_sub_id", {
|
||||
"id": "response_event_id",
|
||||
"pubkey": "relay_public_key",
|
||||
"created_at": 1234567890,
|
||||
"kind": 23457,
|
||||
"content": "nip44 encrypted:{\"query_type\": \"sql_query\", \"request_id\": \"request_event_id\", \"timestamp\": 1234567890, \"query\": \"SELECT * FROM events LIMIT 10\", \"execution_time_ms\": 45, \"row_count\": 10, \"columns\": [\"id\", \"pubkey\", \"created_at\", \"kind\", \"content\"], \"rows\": [[\"abc123...\", \"def456...\", 1234567890, 1, \"Hello world\"], ...]}",
|
||||
"tags": [
|
||||
["p", "admin_public_key"],
|
||||
["e", "request_event_id"]
|
||||
],
|
||||
"sig": "response_event_signature"
|
||||
}]
|
||||
```
|
||||
|
||||
### SQL Query Command
|
||||
|
||||
The `sql_query` command allows administrators to execute read-only SQL queries against the relay database. This provides powerful analytics and debugging capabilities through the admin API.
|
||||
|
||||
**Request/Response Correlation:**
|
||||
- Each response includes the request event ID in both the `tags` array (`["e", "request_event_id"]`) and the decrypted content (`"request_id": "request_event_id"`)
|
||||
- This allows proper correlation when multiple queries are submitted concurrently
|
||||
- Frontend can track pending queries and match responses to requests
|
||||
|
||||
**Security Features:**
|
||||
- Only SELECT statements allowed (INSERT, UPDATE, DELETE, DROP, etc. are blocked)
|
||||
- Query timeout: 5 seconds (configurable)
|
||||
- Result row limit: 1000 rows (configurable)
|
||||
- All queries logged with execution time
|
||||
|
||||
**Available Tables and Views:**
|
||||
- `events` - All Nostr events
|
||||
- `config` - Configuration parameters
|
||||
- `auth_rules` - Authentication rules
|
||||
- `subscription_events` - Subscription lifecycle log
|
||||
- `event_broadcasts` - Event broadcast log
|
||||
- `recent_events` - Last 1000 events (view)
|
||||
- `event_stats` - Event statistics by type (view)
|
||||
- `subscription_analytics` - Subscription metrics (view)
|
||||
- `active_subscriptions_log` - Currently active subscriptions (view)
|
||||
- `event_kinds_view` - Event distribution by kind (view)
|
||||
- `top_pubkeys_view` - Top 10 pubkeys by event count (view)
|
||||
- `time_stats_view` - Time-based statistics (view)
|
||||
|
||||
**Example Queries:**
|
||||
```sql
|
||||
-- Recent events
|
||||
SELECT id, pubkey, created_at, kind FROM events ORDER BY created_at DESC LIMIT 20
|
||||
|
||||
-- Event distribution by kind
|
||||
SELECT * FROM event_kinds_view ORDER BY count DESC
|
||||
|
||||
-- Active subscriptions
|
||||
SELECT * FROM active_subscriptions_log ORDER BY created_at DESC
|
||||
|
||||
-- Database statistics
|
||||
SELECT
|
||||
(SELECT COUNT(*) FROM events) as total_events,
|
||||
(SELECT COUNT(*) FROM subscription_events) as total_subscriptions
|
||||
```
|
||||
|
||||
|
||||
@@ -329,6 +394,68 @@ All admin commands return **signed EVENT responses** via WebSocket following sta
|
||||
|
||||
|
||||
|
||||
## Real-time Monitoring System
|
||||
|
||||
C-Relay includes a subscription-based monitoring system that broadcasts real-time relay statistics using ephemeral events (kind 24567).
|
||||
|
||||
### Activation
|
||||
|
||||
The monitoring system activates automatically when clients subscribe to kind 24567 events:
|
||||
|
||||
```json
|
||||
["REQ", "monitoring-sub", {"kinds": [24567]}]
|
||||
```
|
||||
|
||||
For specific monitoring types, use d-tag filters:
|
||||
|
||||
```json
|
||||
["REQ", "event-kinds-sub", {"kinds": [24567], "#d": ["event_kinds"]}]
|
||||
["REQ", "time-stats-sub", {"kinds": [24567], "#d": ["time_stats"]}]
|
||||
["REQ", "top-pubkeys-sub", {"kinds": [24567], "#d": ["top_pubkeys"]}]
|
||||
```
|
||||
|
||||
When no subscriptions exist, monitoring is dormant to conserve resources.
|
||||
|
||||
### Monitoring Event Types
|
||||
|
||||
| Type | d Tag | Description |
|
||||
|------|-------|-------------|
|
||||
| Event Distribution | `event_kinds` | Event count by kind with percentages |
|
||||
| Time Statistics | `time_stats` | Events in last 24h, 7d, 30d |
|
||||
| Top Publishers | `top_pubkeys` | Top 10 pubkeys by event count |
|
||||
| Active Subscriptions | `active_subscriptions` | Current subscription details (admin only) |
|
||||
| Subscription Details | `subscription_details` | Detailed subscription info (admin only) |
|
||||
| CPU Metrics | `cpu_metrics` | Process CPU and memory usage |
|
||||
|
||||
### Event Structure
|
||||
|
||||
```json
|
||||
{
|
||||
"kind": 24567,
|
||||
"pubkey": "<relay_pubkey>",
|
||||
"created_at": <timestamp>,
|
||||
"content": "{\"data_type\":\"event_kinds\",\"timestamp\":1234567890,...}",
|
||||
"tags": [
|
||||
["d", "event_kinds"]
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### Configuration
|
||||
|
||||
- `kind_24567_reporting_throttle_sec`: Minimum seconds between monitoring events (default: 5)
|
||||
|
||||
### Web Dashboard Integration
|
||||
|
||||
The built-in web dashboard (`/api/`) automatically subscribes to monitoring events and displays real-time statistics.
|
||||
|
||||
### Performance Considerations
|
||||
|
||||
- Monitoring events are ephemeral (not stored in database)
|
||||
- Throttling prevents excessive event generation
|
||||
- Automatic activation/deactivation based on subscriptions
|
||||
- Minimal overhead when no clients are monitoring
|
||||
|
||||
## Direct Messaging Admin System
|
||||
|
||||
In addition to the above admin API, c-relay allows the administrator to direct message the relay to get information or control some settings. As long as the administrator is signed in with any nostr client that allows sending nip-17 direct messages (DMs), they can control the relay.
|
||||
|
||||
612
STATIC_MUSL_GUIDE.md
Normal file
@@ -0,0 +1,612 @@
|
||||
# Static MUSL Build Guide for C Programs
|
||||
|
||||
## Overview
|
||||
|
||||
This guide explains how to build truly portable static binaries using Alpine Linux and MUSL libc. These binaries have **zero runtime dependencies** and work on any Linux distribution without modification.
|
||||
|
||||
This guide is specifically tailored for C programs that use:
|
||||
- **nostr_core_lib** - Nostr protocol implementation
|
||||
- **nostr_login_lite** - Nostr authentication library
|
||||
- Common dependencies: libwebsockets, OpenSSL, SQLite, curl, secp256k1
|
||||
|
||||
## Why MUSL Static Binaries?
|
||||
|
||||
### Advantages Over glibc
|
||||
|
||||
| Feature | MUSL Static | glibc Static | glibc Dynamic |
|
||||
|---------|-------------|--------------|---------------|
|
||||
| **Portability** | ✓ Any Linux | ⚠ glibc only | ✗ Requires matching libs |
|
||||
| **Binary Size** | ~7-10 MB | ~12-15 MB | ~2-3 MB |
|
||||
| **Dependencies** | None | NSS libs | Many system libs |
|
||||
| **Deployment** | Single file | Single file + NSS | Binary + libraries |
|
||||
| **Compatibility** | Universal | glibc version issues | Library version hell |
|
||||
|
||||
### Key Benefits
|
||||
|
||||
1. **True Portability**: Works on Alpine, Ubuntu, Debian, CentOS, Arch, etc.
|
||||
2. **No Library Hell**: No `GLIBC_2.XX not found` errors
|
||||
3. **Simple Deployment**: Just copy one file
|
||||
4. **Reproducible Builds**: Same Docker image = same binary
|
||||
5. **Security**: No dependency on system libraries with vulnerabilities
|
||||
|
||||
## Quick Start
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Docker installed and running
|
||||
- Your C project with source code
|
||||
- Internet connection for downloading dependencies
|
||||
|
||||
### Basic Build Process
|
||||
|
||||
```bash
|
||||
# 1. Copy the Dockerfile template (see below)
|
||||
cp /path/to/c-relay/Dockerfile.alpine-musl ./Dockerfile.static
|
||||
|
||||
# 2. Customize for your project (see Customization section)
|
||||
vim Dockerfile.static
|
||||
|
||||
# 3. Build the static binary
|
||||
docker build --platform linux/amd64 -f Dockerfile.static -t my-app-builder .
|
||||
|
||||
# 4. Extract the binary
|
||||
docker create --name temp-container my-app-builder
|
||||
docker cp temp-container:/build/my_app_static ./my_app_static
|
||||
docker rm temp-container
|
||||
|
||||
# 5. Verify it's static
|
||||
ldd ./my_app_static # Should show "not a dynamic executable"
|
||||
```
|
||||
|
||||
## Dockerfile Template
|
||||
|
||||
Here's a complete Dockerfile template you can customize for your project:
|
||||
|
||||
```dockerfile
|
||||
# Alpine-based MUSL static binary builder
|
||||
# Produces truly portable binaries with zero runtime dependencies
|
||||
|
||||
FROM alpine:3.19 AS builder
|
||||
|
||||
# Install build dependencies
|
||||
RUN apk add --no-cache \
|
||||
build-base \
|
||||
musl-dev \
|
||||
git \
|
||||
cmake \
|
||||
pkgconfig \
|
||||
autoconf \
|
||||
automake \
|
||||
libtool \
|
||||
openssl-dev \
|
||||
openssl-libs-static \
|
||||
zlib-dev \
|
||||
zlib-static \
|
||||
curl-dev \
|
||||
curl-static \
|
||||
sqlite-dev \
|
||||
sqlite-static \
|
||||
linux-headers \
|
||||
wget \
|
||||
bash
|
||||
|
||||
WORKDIR /build
|
||||
|
||||
# Build libsecp256k1 static (required for Nostr)
|
||||
RUN cd /tmp && \
|
||||
git clone https://github.com/bitcoin-core/secp256k1.git && \
|
||||
cd secp256k1 && \
|
||||
./autogen.sh && \
|
||||
./configure --enable-static --disable-shared --prefix=/usr \
|
||||
CFLAGS="-fPIC" && \
|
||||
make -j$(nproc) && \
|
||||
make install && \
|
||||
rm -rf /tmp/secp256k1
|
||||
|
||||
# Build libwebsockets static (if needed for WebSocket support)
|
||||
RUN cd /tmp && \
|
||||
git clone --depth 1 --branch v4.3.3 https://github.com/warmcat/libwebsockets.git && \
|
||||
cd libwebsockets && \
|
||||
mkdir build && cd build && \
|
||||
cmake .. \
|
||||
-DLWS_WITH_STATIC=ON \
|
||||
-DLWS_WITH_SHARED=OFF \
|
||||
-DLWS_WITH_SSL=ON \
|
||||
-DLWS_WITHOUT_TESTAPPS=ON \
|
||||
-DLWS_WITHOUT_TEST_SERVER=ON \
|
||||
-DLWS_WITHOUT_TEST_CLIENT=ON \
|
||||
-DLWS_WITHOUT_TEST_PING=ON \
|
||||
-DLWS_WITH_HTTP2=OFF \
|
||||
-DLWS_WITH_LIBUV=OFF \
|
||||
-DLWS_WITH_LIBEVENT=OFF \
|
||||
-DLWS_IPV6=ON \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DCMAKE_INSTALL_PREFIX=/usr \
|
||||
-DCMAKE_C_FLAGS="-fPIC" && \
|
||||
make -j$(nproc) && \
|
||||
make install && \
|
||||
rm -rf /tmp/libwebsockets
|
||||
|
||||
# Copy git configuration for submodules
|
||||
COPY .gitmodules /build/.gitmodules
|
||||
COPY .git /build/.git
|
||||
|
||||
# Initialize submodules
|
||||
RUN git submodule update --init --recursive
|
||||
|
||||
# Copy and build nostr_core_lib
|
||||
COPY nostr_core_lib /build/nostr_core_lib/
|
||||
RUN cd nostr_core_lib && \
|
||||
chmod +x build.sh && \
|
||||
sed -i 's/CFLAGS="-Wall -Wextra -std=c99 -fPIC -O2"/CFLAGS="-U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0 -Wall -Wextra -std=c99 -fPIC -O2"/' build.sh && \
|
||||
rm -f *.o *.a 2>/dev/null || true && \
|
||||
./build.sh --nips=1,6,13,17,19,44,59
|
||||
|
||||
# Copy and build nostr_login_lite (if used)
|
||||
# COPY nostr_login_lite /build/nostr_login_lite/
|
||||
# RUN cd nostr_login_lite && make static
|
||||
|
||||
# Copy your application source
|
||||
COPY src/ /build/src/
|
||||
COPY Makefile /build/Makefile
|
||||
|
||||
# Build your application with full static linking
|
||||
RUN gcc -static -O2 -Wall -Wextra -std=c99 \
|
||||
-U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0 \
|
||||
-I. -Inostr_core_lib -Inostr_core_lib/nostr_core \
|
||||
-Inostr_core_lib/cjson -Inostr_core_lib/nostr_websocket \
|
||||
src/*.c \
|
||||
-o /build/my_app_static \
|
||||
nostr_core_lib/libnostr_core_x64.a \
|
||||
-lwebsockets -lssl -lcrypto -lsqlite3 -lsecp256k1 \
|
||||
-lcurl -lz -lpthread -lm -ldl && \
|
||||
strip /build/my_app_static
|
||||
|
||||
# Verify it's truly static
|
||||
RUN echo "=== Binary Information ===" && \
|
||||
file /build/my_app_static && \
|
||||
ls -lh /build/my_app_static && \
|
||||
echo "=== Checking for dynamic dependencies ===" && \
|
||||
(ldd /build/my_app_static 2>&1 || echo "Binary is static")
|
||||
|
||||
# Output stage - just the binary
|
||||
FROM scratch AS output
|
||||
COPY --from=builder /build/my_app_static /my_app_static
|
||||
```
|
||||
|
||||
## Customization Guide
|
||||
|
||||
### 1. Adjust Dependencies
|
||||
|
||||
**Add dependencies** by modifying the `apk add` section:
|
||||
|
||||
```dockerfile
|
||||
RUN apk add --no-cache \
|
||||
build-base \
|
||||
musl-dev \
|
||||
# Add your dependencies here:
|
||||
libpng-dev \
|
||||
libpng-static \
|
||||
libjpeg-turbo-dev \
|
||||
libjpeg-turbo-static
|
||||
```
|
||||
|
||||
**Remove unused dependencies** to speed up builds:
|
||||
- Remove `libwebsockets` section if you don't need WebSocket support
|
||||
- Remove `sqlite` if you don't use databases
|
||||
- Remove `curl` if you don't make HTTP requests
|
||||
|
||||
### 2. Configure nostr_core_lib NIPs
|
||||
|
||||
Specify which NIPs your application needs:
|
||||
|
||||
```bash
|
||||
./build.sh --nips=1,6,19 # Minimal: Basic protocol, keys, bech32
|
||||
./build.sh --nips=1,6,13,17,19,44,59 # Full: All common NIPs
|
||||
./build.sh --nips=all # Everything available
|
||||
```
|
||||
|
||||
**Common NIP combinations:**
|
||||
- **Basic client**: `1,6,19` (events, keys, bech32)
|
||||
- **With encryption**: `1,6,19,44` (add modern encryption)
|
||||
- **With DMs**: `1,6,17,19,44,59` (add private messages)
|
||||
- **Relay/server**: `1,6,13,17,19,42,44,59` (add PoW, auth)
|
||||
|
||||
### 3. Modify Compilation Flags
|
||||
|
||||
**For your application:**
|
||||
|
||||
```dockerfile
|
||||
RUN gcc -static -O2 -Wall -Wextra -std=c99 \
|
||||
-U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0 \ # REQUIRED for MUSL
|
||||
-I. -Inostr_core_lib \ # Include paths
|
||||
src/*.c \ # Your source files
|
||||
-o /build/my_app_static \ # Output binary
|
||||
nostr_core_lib/libnostr_core_x64.a \ # Nostr library
|
||||
-lwebsockets -lssl -lcrypto \ # Link libraries
|
||||
-lsqlite3 -lsecp256k1 -lcurl \
|
||||
-lz -lpthread -lm -ldl
|
||||
```
|
||||
|
||||
**Debug build** (with symbols, no optimization):
|
||||
|
||||
```dockerfile
|
||||
RUN gcc -static -g -O0 -DDEBUG \
|
||||
-U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0 \
|
||||
# ... rest of flags
|
||||
```
|
||||
|
||||
### 4. Multi-Architecture Support
|
||||
|
||||
Build for different architectures:
|
||||
|
||||
```bash
|
||||
# x86_64 (Intel/AMD)
|
||||
docker build --platform linux/amd64 -f Dockerfile.static -t my-app-x86 .
|
||||
|
||||
# ARM64 (Apple Silicon, Raspberry Pi 4+)
|
||||
docker build --platform linux/arm64 -f Dockerfile.static -t my-app-arm64 .
|
||||
```
|
||||
|
||||
## Build Script Template
|
||||
|
||||
Create a `build_static.sh` script for convenience:
|
||||
|
||||
```bash
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
BUILD_DIR="$SCRIPT_DIR/build"
|
||||
DOCKERFILE="$SCRIPT_DIR/Dockerfile.static"
|
||||
|
||||
# Detect architecture
|
||||
ARCH=$(uname -m)
|
||||
case "$ARCH" in
|
||||
x86_64)
|
||||
PLATFORM="linux/amd64"
|
||||
OUTPUT_NAME="my_app_static_x86_64"
|
||||
;;
|
||||
aarch64|arm64)
|
||||
PLATFORM="linux/arm64"
|
||||
OUTPUT_NAME="my_app_static_arm64"
|
||||
;;
|
||||
*)
|
||||
echo "Unknown architecture: $ARCH"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
echo "Building for platform: $PLATFORM"
|
||||
mkdir -p "$BUILD_DIR"
|
||||
|
||||
# Build Docker image
|
||||
docker build \
|
||||
--platform "$PLATFORM" \
|
||||
-f "$DOCKERFILE" \
|
||||
-t my-app-builder:latest \
|
||||
--progress=plain \
|
||||
.
|
||||
|
||||
# Extract binary
|
||||
CONTAINER_ID=$(docker create my-app-builder:latest)
|
||||
docker cp "$CONTAINER_ID:/build/my_app_static" "$BUILD_DIR/$OUTPUT_NAME"
|
||||
docker rm "$CONTAINER_ID"
|
||||
|
||||
chmod +x "$BUILD_DIR/$OUTPUT_NAME"
|
||||
|
||||
echo "✓ Build complete: $BUILD_DIR/$OUTPUT_NAME"
|
||||
echo "✓ Size: $(du -h "$BUILD_DIR/$OUTPUT_NAME" | cut -f1)"
|
||||
|
||||
# Verify
|
||||
if ldd "$BUILD_DIR/$OUTPUT_NAME" 2>&1 | grep -q "not a dynamic executable"; then
|
||||
echo "✓ Binary is fully static"
|
||||
else
|
||||
echo "⚠ Warning: Binary may have dynamic dependencies"
|
||||
fi
|
||||
```
|
||||
|
||||
Make it executable:
|
||||
|
||||
```bash
|
||||
chmod +x build_static.sh
|
||||
./build_static.sh
|
||||
```
|
||||
|
||||
## Common Issues and Solutions
|
||||
|
||||
### Issue 1: Fortification Errors
|
||||
|
||||
**Error:**
|
||||
```
|
||||
undefined reference to '__snprintf_chk'
|
||||
undefined reference to '__fprintf_chk'
|
||||
```
|
||||
|
||||
**Cause**: GCC's `-O2` enables fortification by default, which uses glibc-specific functions.
|
||||
|
||||
**Solution**: Add these flags to **all** compilation commands:
|
||||
```bash
|
||||
-U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0
|
||||
```
|
||||
|
||||
This must be applied to:
|
||||
1. nostr_core_lib build.sh
|
||||
2. Your application compilation
|
||||
3. Any other libraries you build
|
||||
|
||||
### Issue 2: Missing Symbols from nostr_core_lib
|
||||
|
||||
**Error:**
|
||||
```
|
||||
undefined reference to 'nostr_create_event'
|
||||
undefined reference to 'nostr_sign_event'
|
||||
```
|
||||
|
||||
**Cause**: Required NIPs not included in nostr_core_lib build.
|
||||
|
||||
**Solution**: Add missing NIPs:
|
||||
```bash
|
||||
./build.sh --nips=1,6,19 # Add the NIPs you need
|
||||
```
|
||||
|
||||
### Issue 3: Docker Permission Denied
|
||||
|
||||
**Error:**
|
||||
```
|
||||
permission denied while trying to connect to the Docker daemon socket
|
||||
```
|
||||
|
||||
**Solution**:
|
||||
```bash
|
||||
sudo usermod -aG docker $USER
|
||||
newgrp docker # Or logout and login
|
||||
```
|
||||
|
||||
### Issue 4: Binary Won't Run on Target System
|
||||
|
||||
**Checks**:
|
||||
```bash
|
||||
# 1. Verify it's static
|
||||
ldd my_app_static # Should show "not a dynamic executable"
|
||||
|
||||
# 2. Check architecture
|
||||
file my_app_static # Should match target system
|
||||
|
||||
# 3. Test on different distributions
|
||||
docker run --rm -v $(pwd):/app alpine:latest /app/my_app_static --version
|
||||
docker run --rm -v $(pwd):/app ubuntu:latest /app/my_app_static --version
|
||||
```
|
||||
|
||||
## Project Structure Example
|
||||
|
||||
Organize your project for easy static builds:
|
||||
|
||||
```
|
||||
my-nostr-app/
|
||||
├── src/
|
||||
│ ├── main.c
|
||||
│ ├── handlers.c
|
||||
│ └── utils.c
|
||||
├── nostr_core_lib/ # Git submodule
|
||||
├── nostr_login_lite/ # Git submodule (if used)
|
||||
├── Dockerfile.static # Static build Dockerfile
|
||||
├── build_static.sh # Build script
|
||||
├── Makefile # Regular build
|
||||
└── README.md
|
||||
```
|
||||
|
||||
### Makefile Integration
|
||||
|
||||
Add static build targets to your Makefile:
|
||||
|
||||
```makefile
|
||||
# Regular dynamic build
|
||||
all: my_app
|
||||
|
||||
my_app: src/*.c
|
||||
gcc -O2 src/*.c -o my_app \
|
||||
nostr_core_lib/libnostr_core_x64.a \
|
||||
-lssl -lcrypto -lsecp256k1 -lz -lpthread -lm
|
||||
|
||||
# Static MUSL build via Docker
|
||||
static:
|
||||
./build_static.sh
|
||||
|
||||
# Clean
|
||||
clean:
|
||||
rm -f my_app build/my_app_static_*
|
||||
|
||||
.PHONY: all static clean
|
||||
```
|
||||
|
||||
## Deployment
|
||||
|
||||
### Single Binary Deployment
|
||||
|
||||
```bash
|
||||
# Copy to server
|
||||
scp build/my_app_static_x86_64 user@server:/opt/my-app/
|
||||
|
||||
# Run (no dependencies needed!)
|
||||
ssh user@server
|
||||
/opt/my-app/my_app_static_x86_64
|
||||
```
|
||||
|
||||
### SystemD Service
|
||||
|
||||
```ini
|
||||
[Unit]
|
||||
Description=My Nostr Application
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=myapp
|
||||
WorkingDirectory=/opt/my-app
|
||||
ExecStart=/opt/my-app/my_app_static_x86_64
|
||||
Restart=always
|
||||
RestartSec=5
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
```
|
||||
|
||||
### Docker Container (Minimal)
|
||||
|
||||
```dockerfile
|
||||
FROM scratch
|
||||
COPY my_app_static_x86_64 /app
|
||||
ENTRYPOINT ["/app"]
|
||||
```
|
||||
|
||||
Build and run:
|
||||
```bash
|
||||
docker build -t my-app:latest .
|
||||
docker run --rm my-app:latest --help
|
||||
```
|
||||
|
||||
## Reusing c-relay Files
|
||||
|
||||
You can directly copy these files from c-relay:
|
||||
|
||||
### 1. Dockerfile.alpine-musl
|
||||
```bash
|
||||
cp /path/to/c-relay/Dockerfile.alpine-musl ./Dockerfile.static
|
||||
```
|
||||
|
||||
Then customize:
|
||||
- Change binary name (line 125)
|
||||
- Adjust source files (line 122-124)
|
||||
- Modify include paths (line 120-121)
|
||||
|
||||
### 2. build_static.sh
|
||||
```bash
|
||||
cp /path/to/c-relay/build_static.sh ./
|
||||
```
|
||||
|
||||
Then customize:
|
||||
- Change `OUTPUT_NAME` variable (lines 66, 70)
|
||||
- Update Docker image name (line 98)
|
||||
- Modify verification commands (lines 180-184)
|
||||
|
||||
### 3. .dockerignore (Optional)
|
||||
```bash
|
||||
cp /path/to/c-relay/.dockerignore ./
|
||||
```
|
||||
|
||||
Helps speed up Docker builds by excluding unnecessary files.
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Version Control**: Commit your Dockerfile and build script
|
||||
2. **Tag Builds**: Include git commit hash in binary version
|
||||
3. **Test Thoroughly**: Verify on multiple distributions
|
||||
4. **Document Dependencies**: List required NIPs and libraries
|
||||
5. **Automate**: Use CI/CD to build on every commit
|
||||
6. **Archive Binaries**: Keep old versions for rollback
|
||||
|
||||
## Performance Comparison
|
||||
|
||||
| Metric | MUSL Static | glibc Dynamic |
|
||||
|--------|-------------|---------------|
|
||||
| Binary Size | 7-10 MB | 2-3 MB + libs |
|
||||
| Startup Time | ~50ms | ~40ms |
|
||||
| Memory Usage | Similar | Similar |
|
||||
| Portability | ✓ Universal | ✗ System-dependent |
|
||||
| Deployment | Single file | Binary + libraries |
|
||||
|
||||
## References
|
||||
|
||||
- [MUSL libc](https://musl.libc.org/)
|
||||
- [Alpine Linux](https://alpinelinux.org/)
|
||||
- [nostr_core_lib](https://github.com/chebizarro/nostr_core_lib)
|
||||
- [Static Linking Best Practices](https://www.musl-libc.org/faq.html)
|
||||
- [c-relay Implementation](./docs/musl_static_build.md)
|
||||
|
||||
## Example: Minimal Nostr Client
|
||||
|
||||
Here's a complete example of building a minimal Nostr client:
|
||||
|
||||
```c
|
||||
// minimal_client.c
|
||||
#include "nostr_core/nostr_core.h"
|
||||
#include <stdio.h>
|
||||
|
||||
int main() {
|
||||
// Generate keypair
|
||||
char nsec[64], npub[64];
|
||||
nostr_generate_keypair(nsec, npub);
|
||||
|
||||
printf("Generated keypair:\n");
|
||||
printf("Private key (nsec): %s\n", nsec);
|
||||
printf("Public key (npub): %s\n", npub);
|
||||
|
||||
// Create event
|
||||
cJSON *event = nostr_create_event(1, "Hello, Nostr!", NULL);
|
||||
nostr_sign_event(event, nsec);
|
||||
|
||||
char *json = cJSON_Print(event);
|
||||
printf("\nSigned event:\n%s\n", json);
|
||||
|
||||
free(json);
|
||||
cJSON_Delete(event);
|
||||
return 0;
|
||||
}
|
||||
```
|
||||
|
||||
**Dockerfile.static:**
|
||||
```dockerfile
|
||||
FROM alpine:3.19 AS builder
|
||||
RUN apk add --no-cache build-base musl-dev git autoconf automake libtool \
|
||||
openssl-dev openssl-libs-static zlib-dev zlib-static
|
||||
|
||||
WORKDIR /build
|
||||
|
||||
# Build secp256k1
|
||||
RUN cd /tmp && git clone https://github.com/bitcoin-core/secp256k1.git && \
|
||||
cd secp256k1 && ./autogen.sh && \
|
||||
./configure --enable-static --disable-shared --prefix=/usr CFLAGS="-fPIC" && \
|
||||
make -j$(nproc) && make install
|
||||
|
||||
# Copy and build nostr_core_lib
|
||||
COPY nostr_core_lib /build/nostr_core_lib/
|
||||
RUN cd nostr_core_lib && \
|
||||
sed -i 's/CFLAGS="-Wall/CFLAGS="-U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0 -Wall/' build.sh && \
|
||||
./build.sh --nips=1,6,19
|
||||
|
||||
# Build application
|
||||
COPY minimal_client.c /build/
|
||||
RUN gcc -static -O2 -Wall -std=c99 \
|
||||
-U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0 \
|
||||
-Inostr_core_lib -Inostr_core_lib/nostr_core -Inostr_core_lib/cjson \
|
||||
minimal_client.c -o /build/minimal_client_static \
|
||||
nostr_core_lib/libnostr_core_x64.a \
|
||||
-lssl -lcrypto -lsecp256k1 -lz -lpthread -lm -ldl && \
|
||||
strip /build/minimal_client_static
|
||||
|
||||
FROM scratch
|
||||
COPY --from=builder /build/minimal_client_static /minimal_client_static
|
||||
```
|
||||
|
||||
**Build and run:**
|
||||
```bash
|
||||
docker build -f Dockerfile.static -t minimal-client .
|
||||
docker create --name temp minimal-client
|
||||
docker cp temp:/minimal_client_static ./
|
||||
docker rm temp
|
||||
|
||||
./minimal_client_static
|
||||
```
|
||||
|
||||
## Conclusion
|
||||
|
||||
Static MUSL binaries provide the best portability for C applications. While they're slightly larger than dynamic binaries, the benefits of zero dependencies and universal compatibility make them ideal for:
|
||||
|
||||
- Server deployments across different Linux distributions
|
||||
- Embedded systems and IoT devices
|
||||
- Docker containers (FROM scratch)
|
||||
- Distribution to users without dependency management
|
||||
- Long-term archival and reproducibility
|
||||
|
||||
Follow this guide to create portable, self-contained binaries for your Nostr applications!
|
||||
58
api/embedded.html
Normal file
@@ -0,0 +1,58 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>Embedded NOSTR_LOGIN_LITE</title>
|
||||
<style>
|
||||
body {
|
||||
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
|
||||
margin: 0;
|
||||
padding: 40px;
|
||||
background: white;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
min-height: 100vh;
|
||||
}
|
||||
|
||||
.container {
|
||||
max-width: 400px;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
#login-container {
|
||||
/* No styling - let embedded modal blend seamlessly */
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="container">
|
||||
<div id="login-container"></div>
|
||||
</div>
|
||||
|
||||
<script src="../lite/nostr.bundle.js"></script>
|
||||
<script src="../lite/nostr-lite.js"></script>
|
||||
|
||||
<script>
|
||||
document.addEventListener('DOMContentLoaded', async () => {
|
||||
await window.NOSTR_LOGIN_LITE.init({
|
||||
theme:'default',
|
||||
methods: {
|
||||
extension: true,
|
||||
local: true,
|
||||
seedphrase: true,
|
||||
readonly: true,
|
||||
connect: true,
|
||||
remote: true,
|
||||
otp: true
|
||||
}
|
||||
});
|
||||
|
||||
window.NOSTR_LOGIN_LITE.embed('#login-container', {
|
||||
seamless: true
|
||||
});
|
||||
});
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
922
api/index.css
466
api/index.html
@@ -4,214 +4,125 @@
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>C-Relay Admin API</title>
|
||||
<title>C-Relay Admin</title>
|
||||
<link rel="stylesheet" href="/api/index.css">
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<h1>C-RELAY ADMIN API</h1>
|
||||
<!-- Side Navigation Menu -->
|
||||
<nav class="side-nav" id="side-nav">
|
||||
<ul class="nav-menu">
|
||||
<li><button class="nav-item" data-page="statistics">Statistics</button></li>
|
||||
<li><button class="nav-item" data-page="subscriptions">Subscriptions</button></li>
|
||||
<li><button class="nav-item" data-page="configuration">Configuration</button></li>
|
||||
<li><button class="nav-item" data-page="authorization">Authorization</button></li>
|
||||
<li><button class="nav-item" data-page="relay-events">Relay Events</button></li>
|
||||
<li><button class="nav-item" data-page="dm">DM</button></li>
|
||||
<li><button class="nav-item" data-page="database">Database Query</button></li>
|
||||
</ul>
|
||||
<div class="nav-footer">
|
||||
<button class="nav-footer-btn" id="nav-dark-mode-btn">DARK MODE</button>
|
||||
<button class="nav-footer-btn" id="nav-logout-btn">LOGOUT</button>
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
<!-- Main Sections Wrapper -->
|
||||
<div class="main-sections-wrapper">
|
||||
<!-- Side Navigation Overlay -->
|
||||
<div class="side-nav-overlay" id="side-nav-overlay"></div>
|
||||
|
||||
<!-- Persistent Authentication Header - Always Visible -->
|
||||
<div id="persistent-auth-container" class="section flex-section">
|
||||
<div class="user-info-container">
|
||||
<button type="button" id="login-logout-btn" class="login-logout-btn">LOGIN</button>
|
||||
<div class="user-details" id="persistent-user-details" style="display: none;">
|
||||
<div><strong>Name:</strong> <span id="persistent-user-name">Loading...</span></div>
|
||||
<div><strong>Public Key:</strong>
|
||||
<div class="user-pubkey" id="persistent-user-pubkey">Loading...</div>
|
||||
</div>
|
||||
<div><strong>About:</strong> <span id="persistent-user-about">Loading...</span></div>
|
||||
<!-- Header with title and profile display -->
|
||||
<div class="section">
|
||||
|
||||
<div class="header-content">
|
||||
<div class="header-title clickable" id="header-title">
|
||||
<span class="relay-letter" data-letter="R">R</span>
|
||||
<span class="relay-letter" data-letter="E">E</span>
|
||||
<span class="relay-letter" data-letter="L">L</span>
|
||||
<span class="relay-letter" data-letter="A">A</span>
|
||||
<span class="relay-letter" data-letter="Y">Y</span>
|
||||
</div>
|
||||
<div class="relay-info">
|
||||
<div id="relay-name" class="relay-name">C-Relay</div>
|
||||
<div id="relay-description" class="relay-description">Loading...</div>
|
||||
<div id="relay-pubkey-container" class="relay-pubkey-container">
|
||||
<div id="relay-pubkey" class="relay-pubkey">Loading...</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Login Section -->
|
||||
<div id="login-section" class="flex-section">
|
||||
<div class="section">
|
||||
<h2>NOSTR AUTHENTICATION</h2>
|
||||
<p id="login-instructions">Please login with your Nostr identity to access the admin interface.</p>
|
||||
<!-- nostr-lite login UI will be injected here -->
|
||||
<div class="profile-area" id="profile-area" style="display: none;">
|
||||
<div class="admin-label">admin</div>
|
||||
<div class="profile-container">
|
||||
<img id="header-user-image" class="header-user-image" alt="Profile" style="display: none;">
|
||||
<span id="header-user-name" class="header-user-name">Loading...</span>
|
||||
</div>
|
||||
<!-- Logout dropdown -->
|
||||
<!-- Dropdown menu removed - buttons moved to sidebar -->
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Relay Connection Section -->
|
||||
<div id="relay-connection-section" class="flex-section">
|
||||
<div class="section">
|
||||
<h2>RELAY CONNECTION</h2>
|
||||
|
||||
<div class="input-group">
|
||||
<label for="relay-connection-url">Relay URL:</label>
|
||||
<input type="text" id="relay-connection-url" value=""
|
||||
placeholder="ws://localhost:8888 or wss://relay.example.com">
|
||||
</div>
|
||||
|
||||
<div class="input-group">
|
||||
<label for="relay-pubkey-manual">Relay Pubkey (if not available via NIP-11):</label>
|
||||
<input type="text" id="relay-pubkey-manual" placeholder="64-character hex pubkey"
|
||||
pattern="[0-9a-fA-F]{64}" title="64-character hexadecimal public key">
|
||||
|
||||
</div>
|
||||
|
||||
<div class="inline-buttons">
|
||||
<button type="button" id="connect-relay-btn">CONNECT TO RELAY</button>
|
||||
<button type="button" id="disconnect-relay-btn" disabled>DISCONNECT</button>
|
||||
<button type="button" id="restart-relay-btn" disabled>RESTART RELAY</button>
|
||||
</div>
|
||||
|
||||
<div class="status disconnected" id="relay-connection-status">NOT CONNECTED</div>
|
||||
|
||||
<!-- Relay Information Display -->
|
||||
<div id="relay-info-display" class="hidden">
|
||||
<h3>Relay Information (NIP-11)</h3>
|
||||
<table class="config-table" id="relay-info-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Property</th>
|
||||
<th>Value</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody id="relay-info-table-body">
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
|
||||
|
||||
</div> <!-- End Main Sections Wrapper -->
|
||||
|
||||
|
||||
|
||||
|
||||
<!-- Testing Section -->
|
||||
<div id="div_config" class="section flex-section" style="display: none;">
|
||||
<h2>RELAY CONFIGURATION</h2>
|
||||
<div id="config-display" class="hidden">
|
||||
<div class="config-table-container">
|
||||
<table class="config-table" id="config-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Parameter</th>
|
||||
<th>Value</th>
|
||||
<th>Actions</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody id="config-table-body">
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<div class="inline-buttons">
|
||||
<button type="button" id="fetch-config-btn">REFRESH</button>
|
||||
</div>
|
||||
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Auth Rules Management - Moved after configuration -->
|
||||
<div class="section flex-section" id="authRulesSection" style="display: none;">
|
||||
<div class="section-header">
|
||||
<h2>AUTH RULES MANAGEMENT</h2>
|
||||
</div>
|
||||
|
||||
<!-- Auth Rules Table -->
|
||||
<div id="authRulesTableContainer" style="display: none;">
|
||||
<table class="config-table" id="authRulesTable">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Rule Type</th>
|
||||
<th>Pattern Type</th>
|
||||
<th>Pattern Value</th>
|
||||
<th>Action</th>
|
||||
<th>Status</th>
|
||||
<th>Actions</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody id="authRulesTableBody">
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<!-- Simplified Auth Rule Input Section -->
|
||||
<div id="authRuleInputSections" style="display: block;">
|
||||
|
||||
<!-- Combined Pubkey Auth Rule Section -->
|
||||
|
||||
|
||||
<div class="input-group">
|
||||
<label for="authRulePubkey">Pubkey (nsec or hex):</label>
|
||||
<input type="text" id="authRulePubkey" placeholder="nsec1... or 64-character hex pubkey">
|
||||
|
||||
</div>
|
||||
<div id="whitelistWarning" class="warning-box" style="display: none;">
|
||||
<strong>⚠️ WARNING:</strong> Adding whitelist rules changes relay behavior to whitelist-only
|
||||
mode.
|
||||
Only whitelisted users will be able to interact with the relay.
|
||||
</div>
|
||||
<div class="inline-buttons">
|
||||
<button type="button" id="addWhitelistBtn" onclick="addWhitelistRule()">ADD TO
|
||||
WHITELIST</button>
|
||||
<button type="button" id="addBlacklistBtn" onclick="addBlacklistRule()">ADD TO
|
||||
BLACKLIST</button>
|
||||
<button type="button" id="refreshAuthRulesBtn">REFRESH</button>
|
||||
</div>
|
||||
|
||||
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
<!-- Login Modal Overlay -->
|
||||
<div id="login-modal" class="login-modal-overlay" style="display: none;">
|
||||
<div class="login-modal-content">
|
||||
<div id="login-modal-container"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- DATABASE STATISTICS Section -->
|
||||
<div class="section">
|
||||
<!-- Subscribe to kind 24567 events to receive real-time monitoring data -->
|
||||
<div class="section flex-section" id="databaseStatisticsSection" style="display: none;">
|
||||
<div class="section-header">
|
||||
<h2>DATABASE STATISTICS</h2>
|
||||
DATABASE STATISTICS
|
||||
</div>
|
||||
|
||||
|
||||
<!-- Event Rate Graph Container -->
|
||||
<div id="event-rate-chart"></div>
|
||||
|
||||
<!-- Database Overview Table -->
|
||||
<div class="input-group">
|
||||
<label>Database Overview:</label>
|
||||
<div class="config-table-container">
|
||||
<table class="config-table" id="stats-overview-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Metric</th>
|
||||
<th>Value</th>
|
||||
<th>Description</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody id="stats-overview-table-body">
|
||||
<tr>
|
||||
<td>Database Size</td>
|
||||
<td id="db-size">-</td>
|
||||
<td>Current database file size</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Total Events</td>
|
||||
<td id="total-events">-</td>
|
||||
<td>Total number of events stored</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Process ID</td>
|
||||
<td id="process-id">-</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Active Subscriptions</td>
|
||||
<td id="active-subscriptions">-</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Memory Usage</td>
|
||||
<td id="memory-usage">-</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>CPU Core</td>
|
||||
<td id="cpu-core">-</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>CPU Usage</td>
|
||||
<td id="cpu-usage">-</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Oldest Event</td>
|
||||
<td id="oldest-event">-</td>
|
||||
<td>Timestamp of oldest event</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Newest Event</td>
|
||||
<td id="newest-event">-</td>
|
||||
<td>Timestamp of newest event</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
@@ -248,24 +159,20 @@
|
||||
<tr>
|
||||
<th>Period</th>
|
||||
<th>Events</th>
|
||||
<th>Description</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody id="stats-time-table-body">
|
||||
<tr>
|
||||
<td>Last 24 Hours</td>
|
||||
<td id="events-24h">-</td>
|
||||
<td>Events in the last day</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Last 7 Days</td>
|
||||
<td id="events-7d">-</td>
|
||||
<td>Events in the last week</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Last 30 Days</td>
|
||||
<td id="events-30d">-</td>
|
||||
<td>Events in the last month</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
@@ -294,12 +201,113 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Refresh Button -->
|
||||
</div>
|
||||
|
||||
<!-- SUBSCRIPTION DETAILS Section (Admin Only) -->
|
||||
<div class="section flex-section" id="subscriptionDetailsSection" style="display: none;">
|
||||
<div class="section-header">
|
||||
ACTIVE SUBSCRIPTION DETAILS
|
||||
</div>
|
||||
|
||||
<div class="input-group">
|
||||
<button type="button" id="refresh-stats-btn">REFRESH STATISTICS</button>
|
||||
<div class="config-table-container">
|
||||
<table class="config-table" id="subscription-details-table">
|
||||
<tbody id="subscription-details-table-body">
|
||||
<tr>
|
||||
<td colspan="4" style="text-align: center; font-style: italic;">No subscriptions active</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Testing Section -->
|
||||
<div id="div_config" class="section flex-section" style="display: none;">
|
||||
<div class="section-header">
|
||||
RELAY CONFIGURATION
|
||||
</div>
|
||||
<div id="config-display" class="hidden">
|
||||
<div class="config-table-container">
|
||||
<table class="config-table" id="config-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Parameter</th>
|
||||
<th>Value</th>
|
||||
<th>Actions</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody id="config-table-body">
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<div class="inline-buttons">
|
||||
<button type="button" id="fetch-config-btn">REFRESH</button>
|
||||
</div>
|
||||
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Auth Rules Management - Moved after configuration -->
|
||||
<div class="section flex-section" id="authRulesSection" style="display: none;">
|
||||
<div class="section-header">
|
||||
AUTH RULES MANAGEMENT
|
||||
</div>
|
||||
|
||||
<!-- Auth Rules Table -->
|
||||
<div id="authRulesTableContainer" style="display: none;">
|
||||
<table class="config-table" id="authRulesTable">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Rule Type</th>
|
||||
<th>Pattern Type</th>
|
||||
<th>Pattern Value</th>
|
||||
<th>Status</th>
|
||||
<th>Actions</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody id="authRulesTableBody">
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<!-- Simplified Auth Rule Input Section -->
|
||||
<div id="authRuleInputSections" style="display: block;">
|
||||
|
||||
<!-- Combined Pubkey Auth Rule Section -->
|
||||
|
||||
|
||||
<div class="input-group">
|
||||
<label for="authRulePubkey">Pubkey (nsec or hex):</label>
|
||||
<input type="text" id="authRulePubkey" placeholder="nsec1... or 64-character hex pubkey">
|
||||
|
||||
</div>
|
||||
<div id="whitelistWarning" class="warning-box" style="display: none;">
|
||||
<strong>⚠️ WARNING:</strong> Adding whitelist rules changes relay behavior to whitelist-only
|
||||
mode.
|
||||
Only whitelisted users will be able to interact with the relay.
|
||||
</div>
|
||||
<div class="inline-buttons">
|
||||
<button type="button" id="addWhitelistBtn" onclick="addWhitelistRule()">ADD TO
|
||||
WHITELIST</button>
|
||||
<button type="button" id="addBlacklistBtn" onclick="addBlacklistRule()">ADD TO
|
||||
BLACKLIST</button>
|
||||
<button type="button" id="refreshAuthRulesBtn">REFRESH</button>
|
||||
</div>
|
||||
|
||||
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
<!-- NIP-17 DIRECT MESSAGES Section -->
|
||||
<div class="section" id="nip17DMSection" style="display: none;">
|
||||
<div class="section-header">
|
||||
@@ -307,7 +315,7 @@
|
||||
</div>
|
||||
|
||||
<!-- Outbox -->
|
||||
<div class="input-group">
|
||||
<div>
|
||||
<label for="dm-outbox">Send Message to Relay:</label>
|
||||
<textarea id="dm-outbox" rows="4" placeholder="Enter your message to send to the relay..."></textarea>
|
||||
</div>
|
||||
@@ -326,6 +334,120 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- RELAY EVENTS Section -->
|
||||
<div class="section" id="relayEventsSection" style="display: none;">
|
||||
<div class="section-header">
|
||||
RELAY EVENTS MANAGEMENT
|
||||
</div>
|
||||
|
||||
<!-- Kind 0: User Metadata -->
|
||||
<div class="input-group">
|
||||
<h3>Kind 0: User Metadata</h3>
|
||||
<div class="form-group">
|
||||
<label for="kind0-name">Name:</label>
|
||||
<input type="text" id="kind0-name" placeholder="Relay Name">
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="kind0-about">About:</label>
|
||||
<textarea id="kind0-about" rows="3" placeholder="Relay Description"></textarea>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="kind0-picture">Picture URL:</label>
|
||||
<input type="url" id="kind0-picture" placeholder="https://example.com/logo.png">
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="kind0-banner">Banner URL:</label>
|
||||
<input type="url" id="kind0-banner" placeholder="https://example.com/banner.png">
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="kind0-nip05">NIP-05:</label>
|
||||
<input type="text" id="kind0-nip05" placeholder="relay@example.com">
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="kind0-website">Website:</label>
|
||||
<input type="url" id="kind0-website" placeholder="https://example.com">
|
||||
</div>
|
||||
<div class="inline-buttons">
|
||||
<button type="button" id="submit-kind0-btn">UPDATE METADATA</button>
|
||||
</div>
|
||||
<div id="kind0-status" class="status-message"></div>
|
||||
</div>
|
||||
|
||||
<!-- Kind 10050: DM Relay List -->
|
||||
<div class="input-group">
|
||||
<h3>Kind 10050: DM Relay List</h3>
|
||||
<div class="form-group">
|
||||
<label for="kind10050-relays">Relay URLs (one per line):</label>
|
||||
<textarea id="kind10050-relays" rows="4" placeholder="wss://relay1.com wss://relay2.com"></textarea>
|
||||
</div>
|
||||
<div class="inline-buttons">
|
||||
<button type="button" id="submit-kind10050-btn">UPDATE DM RELAYS</button>
|
||||
</div>
|
||||
<div id="kind10050-status" class="status-message"></div>
|
||||
</div>
|
||||
|
||||
<!-- Kind 10002: Relay List -->
|
||||
<div class="input-group">
|
||||
<h3>Kind 10002: Relay List</h3>
|
||||
<div id="kind10002-relay-entries">
|
||||
<!-- Dynamic relay entries will be added here -->
|
||||
</div>
|
||||
<div class="inline-buttons">
|
||||
<button type="button" id="add-relay-entry-btn">ADD RELAY</button>
|
||||
<button type="button" id="submit-kind10002-btn">UPDATE RELAYS</button>
|
||||
</div>
|
||||
<div id="kind10002-status" class="status-message"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- SQL QUERY Section -->
|
||||
<div class="section" id="sqlQuerySection" style="display: none;">
|
||||
<div class="section-header">
|
||||
<h2>SQL QUERY CONSOLE</h2>
|
||||
</div>
|
||||
|
||||
<!-- Query Selector -->
|
||||
<div class="input-group">
|
||||
<label for="query-dropdown">Quick Queries & History:</label>
|
||||
<select id="query-dropdown" onchange="loadSelectedQuery()">
|
||||
<option value="">-- Select a query --</option>
|
||||
<optgroup label="Common Queries">
|
||||
<option value="recent_events">Recent Events</option>
|
||||
<option value="event_stats">Event Statistics</option>
|
||||
<option value="subscriptions">Active Subscriptions</option>
|
||||
<option value="top_pubkeys">Top Pubkeys</option>
|
||||
<option value="event_kinds">Event Kinds Distribution</option>
|
||||
<option value="time_stats">Time-based Statistics</option>
|
||||
</optgroup>
|
||||
<optgroup label="Query History" id="history-group">
|
||||
<!-- Dynamically populated from localStorage -->
|
||||
</optgroup>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
<!-- Query Editor -->
|
||||
<div class="input-group">
|
||||
<label for="sql-input">SQL Query:</label>
|
||||
<textarea id="sql-input" rows="5" placeholder="SELECT * FROM events LIMIT 10"></textarea>
|
||||
</div>
|
||||
|
||||
<!-- Query Actions -->
|
||||
<div class="input-group">
|
||||
<div class="inline-buttons">
|
||||
<button type="button" id="execute-sql-btn">EXECUTE QUERY</button>
|
||||
<button type="button" id="clear-sql-btn">CLEAR</button>
|
||||
<button type="button" id="clear-history-btn">CLEAR HISTORY</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Query Results -->
|
||||
<div class="input-group">
|
||||
<label>Query Results:</label>
|
||||
<div id="query-info" class="info-box"></div>
|
||||
<div id="query-table" class="config-table-container"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Load the official nostr-tools bundle first -->
|
||||
<!-- <script src="https://laantungir.net/nostr-login-lite/nostr.bundle.js"></script> -->
|
||||
<script src="/api/nostr.bundle.js"></script>
|
||||
@@ -333,6 +455,8 @@
|
||||
<!-- Load NOSTR_LOGIN_LITE main library -->
|
||||
<!-- <script src="https://laantungir.net/nostr-login-lite/nostr-lite.js"></script> -->
|
||||
<script src="/api/nostr-lite.js"></script>
|
||||
<!-- Load text_graph library -->
|
||||
<script src="/api/text_graph.js"></script>
|
||||
|
||||
|
||||
|
||||
|
||||
8504
api/index.js
463
api/text_graph.js
Normal file
@@ -0,0 +1,463 @@
|
||||
/**
|
||||
* ASCIIBarChart - A dynamic ASCII-based vertical bar chart renderer
|
||||
*
|
||||
* Creates real-time animated bar charts using monospaced characters (X)
|
||||
* with automatic scaling, labels, and responsive font sizing.
|
||||
*/
|
||||
class ASCIIBarChart {
|
||||
/**
|
||||
* Create a new ASCII bar chart
|
||||
* @param {string} containerId - The ID of the HTML element to render the chart in
|
||||
* @param {Object} options - Configuration options
|
||||
* @param {number} [options.maxHeight=20] - Maximum height of the chart in rows
|
||||
* @param {number} [options.maxDataPoints=30] - Maximum number of data columns before scrolling
|
||||
* @param {string} [options.title=''] - Chart title (displayed centered at top)
|
||||
* @param {string} [options.xAxisLabel=''] - X-axis label (displayed centered at bottom)
|
||||
* @param {string} [options.yAxisLabel=''] - Y-axis label (displayed vertically on left)
|
||||
* @param {boolean} [options.autoFitWidth=true] - Automatically adjust font size to fit container width
|
||||
* @param {boolean} [options.useBinMode=false] - Enable time bin mode for data aggregation
|
||||
* @param {number} [options.binDuration=10000] - Duration of each time bin in milliseconds (10 seconds default)
|
||||
* @param {string} [options.xAxisLabelFormat='elapsed'] - X-axis label format: 'elapsed', 'bins', 'timestamps', 'ranges'
|
||||
* @param {boolean} [options.debug=false] - Enable debug logging
|
||||
*/
|
||||
constructor(containerId, options = {}) {
|
||||
this.container = document.getElementById(containerId);
|
||||
this.data = [];
|
||||
this.maxHeight = options.maxHeight || 20;
|
||||
this.maxDataPoints = options.maxDataPoints || 30;
|
||||
this.totalDataPoints = 0; // Track total number of data points added
|
||||
this.title = options.title || '';
|
||||
this.xAxisLabel = options.xAxisLabel || '';
|
||||
this.yAxisLabel = options.yAxisLabel || '';
|
||||
this.autoFitWidth = options.autoFitWidth !== false; // Default to true
|
||||
this.debug = options.debug || false; // Debug logging option
|
||||
|
||||
// Time bin configuration
|
||||
this.useBinMode = options.useBinMode !== false; // Default to true
|
||||
this.binDuration = options.binDuration || 4000; // 4 seconds default
|
||||
this.xAxisLabelFormat = options.xAxisLabelFormat || 'elapsed';
|
||||
|
||||
// Time bin data structures
|
||||
this.bins = [];
|
||||
this.currentBinIndex = -1;
|
||||
this.binStartTime = null;
|
||||
this.binCheckInterval = null;
|
||||
this.chartStartTime = Date.now();
|
||||
|
||||
// Set up resize observer if auto-fit is enabled
|
||||
if (this.autoFitWidth) {
|
||||
this.resizeObserver = new ResizeObserver(() => {
|
||||
this.adjustFontSize();
|
||||
});
|
||||
this.resizeObserver.observe(this.container);
|
||||
}
|
||||
|
||||
// Initialize first bin if bin mode is enabled
|
||||
if (this.useBinMode) {
|
||||
this.initializeBins();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a new data point to the chart
|
||||
* @param {number} value - The numeric value to add
|
||||
*/
|
||||
addValue(value) {
|
||||
// Time bin mode: add value to current active bin count
|
||||
this.checkBinRotation(); // Ensure we have an active bin
|
||||
this.bins[this.currentBinIndex].count += value; // Changed from ++ to += value
|
||||
this.totalDataPoints++;
|
||||
|
||||
this.render();
|
||||
this.updateInfo();
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear all data from the chart
|
||||
*/
|
||||
clear() {
|
||||
this.data = [];
|
||||
this.totalDataPoints = 0;
|
||||
|
||||
if (this.useBinMode) {
|
||||
this.bins = [];
|
||||
this.currentBinIndex = -1;
|
||||
this.binStartTime = null;
|
||||
this.initializeBins();
|
||||
}
|
||||
|
||||
this.render();
|
||||
this.updateInfo();
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate the width of the chart in characters
|
||||
* @returns {number} The chart width in characters
|
||||
* @private
|
||||
*/
|
||||
getChartWidth() {
|
||||
let dataLength = this.maxDataPoints; // Always use maxDataPoints for consistent width
|
||||
|
||||
if (dataLength === 0) return 50; // Default width for empty chart
|
||||
|
||||
const yAxisPadding = this.yAxisLabel ? 2 : 0;
|
||||
const yAxisNumbers = 3; // Width of Y-axis numbers
|
||||
const separator = 1; // The '|' character
|
||||
// const dataWidth = dataLength * 2; // Each column is 2 characters wide // TEMP: commented for no-space test
|
||||
const dataWidth = dataLength; // Each column is 1 character wide // TEMP: adjusted for no-space columns
|
||||
const padding = 1; // Extra padding
|
||||
|
||||
const totalWidth = yAxisPadding + yAxisNumbers + separator + dataWidth + padding;
|
||||
|
||||
// Only log when width changes
|
||||
if (this.debug && this.lastChartWidth !== totalWidth) {
|
||||
console.log('getChartWidth changed:', { dataLength, totalWidth, previous: this.lastChartWidth });
|
||||
this.lastChartWidth = totalWidth;
|
||||
}
|
||||
|
||||
return totalWidth;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adjust font size to fit container width
|
||||
* @private
|
||||
*/
|
||||
adjustFontSize() {
|
||||
if (!this.autoFitWidth) return;
|
||||
|
||||
const containerWidth = this.container.clientWidth;
|
||||
const chartWidth = this.getChartWidth();
|
||||
|
||||
if (chartWidth === 0) return;
|
||||
|
||||
// Calculate optimal font size
|
||||
// For monospace fonts, character width is approximately 0.6 * font size
|
||||
// Use a slightly smaller ratio to fit more content
|
||||
const charWidthRatio = 0.7;
|
||||
const padding = 30; // Reduce padding to fit more content
|
||||
const availableWidth = containerWidth - padding;
|
||||
const optimalFontSize = Math.floor((availableWidth / chartWidth) / charWidthRatio);
|
||||
|
||||
// Set reasonable bounds (min 4px, max 20px)
|
||||
const fontSize = Math.max(4, Math.min(20, optimalFontSize));
|
||||
|
||||
// Only log when font size changes
|
||||
if (this.debug && this.lastFontSize !== fontSize) {
|
||||
console.log('fontSize changed:', { containerWidth, chartWidth, fontSize, previous: this.lastFontSize });
|
||||
this.lastFontSize = fontSize;
|
||||
}
|
||||
|
||||
this.container.style.fontSize = fontSize + 'px';
|
||||
this.container.style.lineHeight = '1.0';
|
||||
}
|
||||
|
||||
/**
 * Render the chart to the container.
 *
 * Builds the whole chart as one plain-text string (title, Y axis,
 * data columns of 'X' characters, X axis and labels) and assigns it
 * to the container's textContent. Also updates the external
 * '#values', '#max-value' and '#scale' elements.
 * @private
 */
render() {
    let dataToRender = [];
    let maxValue = 0;
    let minValue = 0;
    let valueRange = 0; // computed below but never read afterwards

    if (this.useBinMode) {
        // Bin mode: render bin counts
        if (this.bins.length === 0) {
            this.container.textContent = 'No data yet. Click Start to begin.';
            return;
        }
        // Always create a fixed-length array filled with 0s, then overlay actual bin data
        dataToRender = new Array(this.maxDataPoints).fill(0);

        // Overlay actual bin data (most recent bins, reversed for left-to-right display)
        const startIndex = Math.max(0, this.bins.length - this.maxDataPoints);
        const recentBins = this.bins.slice(startIndex);

        // Reverse the bins so most recent is on the left, and overlay onto the fixed array
        // (slice() above copies, so reverse() does not mutate this.bins)
        recentBins.reverse().forEach((bin, index) => {
            if (index < this.maxDataPoints) {
                dataToRender[index] = bin.count;
            }
        });

        if (this.debug) {
            console.log('render() dataToRender:', dataToRender, 'bins length:', this.bins.length);
        }
        maxValue = Math.max(...dataToRender);
        minValue = Math.min(...dataToRender);
        valueRange = maxValue - minValue;
    } else {
        // Legacy mode: render individual values
        if (this.data.length === 0) {
            this.container.textContent = 'No data yet. Click Start to begin.';
            return;
        }
        dataToRender = this.data;
        maxValue = Math.max(...this.data);
        minValue = Math.min(...this.data);
        valueRange = maxValue - minValue;
    }

    let output = '';
    const scale = this.maxHeight; // chart height in rows

    // Calculate scaling factor: each X represents at least 1 count
    const maxCount = Math.max(...dataToRender);
    const scaleFactor = Math.max(1, Math.ceil(maxCount / scale)); // 1 X = scaleFactor counts
    const scaledMax = Math.ceil(maxCount / scaleFactor) * scaleFactor; // currently unused

    // Calculate Y-axis label width (for vertical text)
    const yLabelWidth = this.yAxisLabel ? 2 : 0; // currently unused
    // NOTE(review): the exact space counts inside these alignment string
    // literals control column alignment — verify against rendered output.
    const yAxisPadding = this.yAxisLabel ? ' ' : '';

    // Add title if provided (centered)
    if (this.title) {
        // const chartWidth = 4 + this.maxDataPoints * 2; // Y-axis numbers + data columns // TEMP: commented for no-space test
        const chartWidth = 4 + this.maxDataPoints; // Y-axis numbers + data columns // TEMP: adjusted for no-space columns
        const titlePadding = Math.floor((chartWidth - this.title.length) / 2);
        output += yAxisPadding + ' '.repeat(Math.max(0, titlePadding)) + this.title + '\n\n';
    }

    // Draw from top to bottom
    for (let row = scale; row > 0; row--) {
        let line = '';

        // Add vertical Y-axis label character (one letter per row, centered)
        if (this.yAxisLabel) {
            const L = this.yAxisLabel.length;
            const startRow = Math.floor((scale - L) / 2) + 1;
            const relativeRow = scale - row + 1; // 1 at top, scale at bottom
            if (relativeRow >= startRow && relativeRow < startRow + L) {
                const labelIndex = relativeRow - startRow;
                line += this.yAxisLabel[labelIndex] + ' ';
            } else {
                line += ' ';
            }
        }

        // Calculate the actual count value this row represents (1 at bottom, increasing upward)
        const rowCount = (row - 1) * scaleFactor + 1;

        // Add Y-axis label (show actual count values)
        line += String(rowCount).padStart(3, ' ') + ' |';

        // Draw each column: an 'X' when the column's scaled height reaches this row
        for (let i = 0; i < dataToRender.length; i++) {
            const count = dataToRender[i];
            const scaledHeight = Math.ceil(count / scaleFactor);

            if (scaledHeight >= row) {
                // line += ' X'; // TEMP: commented out space between columns
                line += 'X'; // TEMP: no space between columns
            } else {
                // line += '  '; // TEMP: commented out space between columns
                line += ' '; // TEMP: single space for empty columns
            }
        }

        output += line + '\n';
    }

    // Draw X-axis
    // output += yAxisPadding + ' +' + '-'.repeat(this.maxDataPoints * 2) + '\n'; // TEMP: commented out for no-space test
    output += yAxisPadding + ' +' + '-'.repeat(this.maxDataPoints) + '\n'; // TEMP: back to original length

    // Draw X-axis labels based on mode and format
    let xAxisLabels = yAxisPadding + ' '; // Initial padding to align with X-axis

    // Determine label interval (every 5 columns)
    const labelInterval = 5;

    // Generate all labels first and store in array
    let labels = [];
    for (let i = 0; i < this.maxDataPoints; i++) {
        if (i % labelInterval === 0) {
            let label = '';
            if (this.useBinMode) {
                // For bin mode, show labels for all possible positions
                // i=0 is leftmost (most recent), i=maxDataPoints-1 is rightmost (oldest)
                const elapsedSec = (i * this.binDuration) / 1000;
                // Format with appropriate precision for sub-second bins
                if (this.binDuration < 1000) {
                    // Show decimal seconds for sub-second bins
                    label = elapsedSec.toFixed(1) + 's';
                } else {
                    // Show whole seconds for 1+ second bins
                    label = String(Math.round(elapsedSec)) + 's';
                }
            } else {
                // For legacy mode, show data point numbers
                const startIndex = Math.max(1, this.totalDataPoints - this.maxDataPoints + 1);
                label = String(startIndex + i);
            }
            labels.push(label);
        }
    }

    // Build the label string with calculated spacing
    for (let i = 0; i < labels.length; i++) {
        const label = labels[i];
        xAxisLabels += label;

        // Add spacing: labelInterval - label.length (except for last label)
        if (i < labels.length - 1) {
            const spacing = labelInterval - label.length;
            xAxisLabels += ' '.repeat(spacing);
        }
    }

    // Ensure the label line extends to match the X-axis dash line length
    // The dash line is this.maxDataPoints characters long, starting after " +"
    const dashLineLength = this.maxDataPoints;
    const minLabelLineLength = yAxisPadding.length + 4 + dashLineLength; // 4 for the axis prefix
    if (xAxisLabels.length < minLabelLineLength) {
        xAxisLabels += ' '.repeat(minLabelLineLength - xAxisLabels.length);
    }
    output += xAxisLabels + '\n';

    // Add X-axis label if provided (centered under the data columns)
    if (this.xAxisLabel) {
        // const labelPadding = Math.floor((this.maxDataPoints * 2 - this.xAxisLabel.length) / 2); // TEMP: commented for no-space test
        const labelPadding = Math.floor((this.maxDataPoints - this.xAxisLabel.length) / 2); // TEMP: adjusted for no-space columns
        output += '\n' + yAxisPadding + ' ' + ' '.repeat(Math.max(0, labelPadding)) + this.xAxisLabel + '\n';
    }

    this.container.textContent = output;

    // Adjust font size to fit width (only once at initialization)
    if (this.autoFitWidth) {
        this.adjustFontSize();
    }

    // Update the external info display
    if (this.useBinMode) {
        const binCounts = this.bins.map(bin => bin.count); // currently unused
        // shadows the outer scaleFactor; recomputed from maxValue for display
        const scaleFactor = Math.max(1, Math.ceil(maxValue / scale));
        document.getElementById('values').textContent = `[${dataToRender.join(', ')}]`;
        document.getElementById('max-value').textContent = maxValue;
        document.getElementById('scale').textContent = `Min: ${minValue}, Max: ${maxValue}, 1X=${scaleFactor} counts`;
    } else {
        document.getElementById('values').textContent = `[${this.data.join(', ')}]`;
        document.getElementById('max-value').textContent = maxValue;
        document.getElementById('scale').textContent = `Min: ${minValue}, Max: ${maxValue}, Height: ${scale}`;
    }
}
||||
/**
|
||||
* Update the info display
|
||||
* @private
|
||||
*/
|
||||
updateInfo() {
|
||||
if (this.useBinMode) {
|
||||
const totalCount = this.bins.reduce((sum, bin) => sum + bin.count, 0);
|
||||
document.getElementById('count').textContent = totalCount;
|
||||
} else {
|
||||
document.getElementById('count').textContent = this.data.length;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize the bin system
|
||||
* @private
|
||||
*/
|
||||
initializeBins() {
|
||||
this.bins = [];
|
||||
this.currentBinIndex = -1;
|
||||
this.binStartTime = null;
|
||||
this.chartStartTime = Date.now();
|
||||
|
||||
// Create first bin
|
||||
this.rotateBin();
|
||||
|
||||
// Set up automatic bin rotation check
|
||||
this.binCheckInterval = setInterval(() => {
|
||||
this.checkBinRotation();
|
||||
}, 100); // Check every 100ms for responsiveness
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if current bin should rotate and create new bin if needed
|
||||
* @private
|
||||
*/
|
||||
checkBinRotation() {
|
||||
if (!this.useBinMode || !this.binStartTime) return;
|
||||
|
||||
const now = Date.now();
|
||||
if ((now - this.binStartTime) >= this.binDuration) {
|
||||
this.rotateBin();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Rotate to a new bin, finalizing the current one
|
||||
*/
|
||||
rotateBin() {
|
||||
// Finalize current bin if it exists
|
||||
if (this.currentBinIndex >= 0) {
|
||||
this.bins[this.currentBinIndex].isActive = false;
|
||||
}
|
||||
|
||||
// Create new bin
|
||||
const newBin = {
|
||||
startTime: Date.now(),
|
||||
count: 0,
|
||||
isActive: true
|
||||
};
|
||||
|
||||
this.bins.push(newBin);
|
||||
this.currentBinIndex = this.bins.length - 1;
|
||||
this.binStartTime = newBin.startTime;
|
||||
|
||||
// Keep only the most recent bins
|
||||
if (this.bins.length > this.maxDataPoints) {
|
||||
this.bins.shift();
|
||||
this.currentBinIndex--;
|
||||
}
|
||||
|
||||
// Ensure currentBinIndex points to the last bin (the active one)
|
||||
this.currentBinIndex = this.bins.length - 1;
|
||||
|
||||
// Force a render to update the display immediately
|
||||
this.render();
|
||||
this.updateInfo();
|
||||
}
|
||||
|
||||
/**
|
||||
* Format X-axis label for a bin based on the configured format
|
||||
* @param {number} binIndex - Index of the bin
|
||||
* @returns {string} Formatted label
|
||||
* @private
|
||||
*/
|
||||
formatBinLabel(binIndex) {
|
||||
const bin = this.bins[binIndex];
|
||||
if (!bin) return ' ';
|
||||
|
||||
switch (this.xAxisLabelFormat) {
|
||||
case 'bins':
|
||||
return String(binIndex + 1).padStart(2, ' ');
|
||||
|
||||
case 'timestamps':
|
||||
const time = new Date(bin.startTime);
|
||||
return time.toLocaleTimeString('en-US', {
|
||||
hour12: false,
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
second: '2-digit'
|
||||
}).replace(/:/g, '');
|
||||
|
||||
case 'ranges':
|
||||
const startSec = Math.floor((bin.startTime - this.chartStartTime) / 1000);
|
||||
const endSec = startSec + Math.floor(this.binDuration / 1000);
|
||||
return `${startSec}-${endSec}`;
|
||||
|
||||
case 'elapsed':
|
||||
default:
|
||||
// For elapsed time, always show time relative to the first bin (index 0)
|
||||
// This keeps the leftmost label as 0s and increases to the right
|
||||
const firstBinTime = this.bins[0] ? this.bins[0].startTime : this.chartStartTime;
|
||||
const elapsedSec = Math.floor((bin.startTime - firstBinTime) / 1000);
|
||||
return String(elapsedSec).padStart(2, ' ') + 's';
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,616 +0,0 @@
|
||||
#!/bin/bash
# Build-and-push helper for C-Relay: compiles, bumps the version tag,
# commits, pushes, and (in release mode) publishes Gitea release assets.
set -e

# ANSI color escapes used by the logging helpers below.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

# Leveled logging helpers: colored tag followed by the message.
print_status()  { echo -e "${BLUE}[INFO]${NC} $1"; }
print_success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; }
print_warning() { echo -e "${YELLOW}[WARNING]${NC} $1"; }
print_error()   { echo -e "${RED}[ERROR]${NC} $1"; }

# Script-wide state filled in by argument parsing below.
COMMIT_MESSAGE=""
RELEASE_MODE=false
|
||||
# Print usage/help text to stdout. $0 is expanded inside the heredoc.
show_usage() {
  cat <<EOF
C-Relay Build and Push Script

Usage:
 $0 "commit message" - Default: compile, increment patch, commit & push
 $0 -r "commit message" - Release: compile x86+arm64, increment minor, create release

Examples:
 $0 "Fixed event validation bug"
 $0 --release "Major release with new features"

Default Mode (patch increment):
 - Compile C-Relay
 - Increment patch version (v1.2.3 → v1.2.4)
 - Git add, commit with message, and push

Release Mode (-r flag):
 - Compile C-Relay for x86_64 and arm64 (dynamic and static versions)
 - Increment minor version, zero patch (v1.2.3 → v1.3.0)
 - Git add, commit, push, and create Gitea release

Requirements for Release Mode:
 - For ARM64 builds: make install-arm64-deps (optional - will build x86_64 only if missing)
 - For static builds: sudo apt-get install musl-dev libcap-dev libuv1-dev libev-dev
 - Gitea token in ~/.gitea_token for release uploads
EOF
}
|
||||
# ---- Command-line parsing -------------------------------------------------
# Flags may appear in any order; the first bare argument becomes the
# commit message and any further bare arguments are ignored.
while [[ $# -gt 0 ]]; do
  case "$1" in
    -r|--release)
      RELEASE_MODE=true
      shift
      ;;
    -h|--help)
      show_usage
      exit 0
      ;;
    *)
      # Keep only the first non-flag argument as the commit message.
      if [[ -z "$COMMIT_MESSAGE" ]]; then
        COMMIT_MESSAGE="$1"
      fi
      shift
      ;;
  esac
done

# A commit message is mandatory in every mode.
if [[ -z "$COMMIT_MESSAGE" ]]; then
  print_error "Commit message is required"
  echo ""
  show_usage
  exit 1
fi
|
||||
# Abort the script unless the current directory is inside a git repository.
check_git_repo() {
  if git rev-parse --git-dir > /dev/null 2>&1; then
    return 0
  fi
  print_error "Not in a git repository"
  exit 1
}
|
||||
#######################################
# Determine the next semantic version from existing git tags.
# Globals:   LATEST_TAG, VERSION, MAJOR, MINOR, PATCH (written),
#            NEW_VERSION (written and exported)
# Arguments: $1 - "minor" for a release bump, anything else bumps patch
# Outputs:   progress messages via the print_* helpers
#######################################
increment_version() {
  local increment_type="$1" # "patch" or "minor"

  print_status "Getting current version..."

  # Use the highest semver tag (sort -V), not the chronologically newest.
  LATEST_TAG=$(git tag -l 'v*.*.*' | sort -V | tail -n 1 || echo "")
  if [[ -z "$LATEST_TAG" ]]; then
    LATEST_TAG="v0.0.0"
    print_warning "No version tags found, starting from $LATEST_TAG"
  fi

  # Strip the leading 'v' and split into numeric components.
  VERSION=${LATEST_TAG#v}
  if [[ ! $VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
    print_error "Invalid version format in tag: $LATEST_TAG"
    print_error "Expected format: v0.1.0"
    exit 1
  fi
  MAJOR=${BASH_REMATCH[1]}
  MINOR=${BASH_REMATCH[2]}
  PATCH=${BASH_REMATCH[3]}

  case "$increment_type" in
    minor)
      # Release: bump minor, reset patch to zero.
      NEW_MINOR=$((MINOR + 1))
      NEW_PATCH=0
      NEW_VERSION="v${MAJOR}.${NEW_MINOR}.${NEW_PATCH}"
      print_status "Release mode: incrementing minor version"
      ;;
    *)
      # Default: bump patch only.
      NEW_PATCH=$((PATCH + 1))
      NEW_VERSION="v${MAJOR}.${MINOR}.${NEW_PATCH}"
      print_status "Default mode: incrementing patch version"
      ;;
  esac

  print_status "Current version: $LATEST_TAG"
  print_status "New version: $NEW_VERSION"

  # Make the new version visible to later steps and subprocesses.
  export NEW_VERSION
}
|
||||
# Build the default C-Relay binary, regenerating version headers first.
# Clean/header failures are soft warnings; a failed build aborts the script.
compile_project() {
  print_status "Compiling C-Relay..."

  # Best-effort clean; a missing Makefile is not fatal at this point.
  if make clean > /dev/null 2>&1; then
    print_success "Cleaned previous build"
  else
    print_warning "Clean failed or no Makefile found"
  fi

  # Regenerate main.h so the binary embeds the freshly created tag.
  if make force-version > /dev/null 2>&1; then
    print_success "Regenerated main.h"
  else
    print_warning "Failed to regenerate main.h"
  fi

  # The actual build must succeed.
  if ! make > /dev/null 2>&1; then
    print_error "Compilation failed"
    exit 1
  fi
  print_success "C-Relay compiled successfully"
}
|
||||
# Function to build release binaries
# Builds up to four artifacts in sequence: dynamic x86_64 (mandatory),
# dynamic ARM64, static (musl) x86_64 and static (musl) ARM64 — the last
# three are best-effort and only emit warnings when their toolchain is
# missing. Finishes by restoring the default build.
build_release_binaries() {
    print_status "Building release binaries..."

    # Build x86_64 version (mandatory: any failure here aborts the script)
    print_status "Building x86_64 version..."
    make clean > /dev/null 2>&1
    if make x86 > /dev/null 2>&1; then
        if [[ -f "build/c_relay_x86" ]]; then
            cp build/c_relay_x86 c-relay-x86_64
            print_success "x86_64 binary created: c-relay-x86_64"
        else
            print_error "x86_64 binary not found after compilation"
            exit 1
        fi
    else
        print_error "x86_64 build failed"
        exit 1
    fi

    # Try to build ARM64 version (optional: warn and continue on failure)
    print_status "Attempting ARM64 build..."
    make clean > /dev/null 2>&1
    if make arm64 > /dev/null 2>&1; then
        if [[ -f "build/c_relay_arm64" ]]; then
            cp build/c_relay_arm64 c-relay-arm64
            print_success "ARM64 binary created: c-relay-arm64"
        else
            print_warning "ARM64 binary not found after compilation"
        fi
    else
        print_warning "ARM64 build failed - ARM64 cross-compilation not properly set up"
        print_status "Only x86_64 binary will be included in release"
    fi

    # Build static x86_64 version (optional: requires the musl toolchain)
    print_status "Building static x86_64 version..."
    make clean > /dev/null 2>&1
    if make static-musl-x86_64 > /dev/null 2>&1; then
        if [[ -f "build/c_relay_static_musl_x86_64" ]]; then
            cp build/c_relay_static_musl_x86_64 c-relay-static-x86_64
            print_success "Static x86_64 binary created: c-relay-static-x86_64"
        else
            print_warning "Static x86_64 binary not found after compilation"
        fi
    else
        print_warning "Static x86_64 build failed - MUSL development packages may not be installed"
        print_status "Run 'sudo apt-get install musl-dev libcap-dev libuv1-dev libev-dev' to enable static builds"
    fi

    # Try to build static ARM64 version (optional)
    print_status "Attempting static ARM64 build..."
    make clean > /dev/null 2>&1
    if make static-musl-arm64 > /dev/null 2>&1; then
        if [[ -f "build/c_relay_static_musl_arm64" ]]; then
            cp build/c_relay_static_musl_arm64 c-relay-static-arm64
            print_success "Static ARM64 binary created: c-relay-static-arm64"
        else
            print_warning "Static ARM64 binary not found after compilation"
        fi
    else
        print_warning "Static ARM64 build failed - ARM64 cross-compilation or MUSL ARM64 packages not set up"
    fi

    # Restore normal build so the working tree ends with the default binary
    make clean > /dev/null 2>&1
    make > /dev/null 2>&1
}
||||
|
||||
# Stage everything, commit (only when something is staged), create the
# NEW_VERSION tag, then push the branch followed by just that tag.
git_commit_and_push() {
    print_status "Preparing git commit..."

    # Stage all changes; failure here is fatal.
    if ! git add . > /dev/null 2>&1; then
        print_error "Failed to stage changes"
        exit 1
    fi
    print_success "Staged all changes"

    # Commit only when something is actually staged.
    if git diff --staged --quiet; then
        print_warning "No changes to commit"
    else
        if ! git commit -m "$NEW_VERSION - $COMMIT_MESSAGE" > /dev/null 2>&1; then
            print_error "Failed to commit changes"
            exit 1
        fi
        print_success "Committed changes"
    fi

    # Tag the new commit; an already-existing tag is tolerated.
    if git tag "$NEW_VERSION" > /dev/null 2>&1; then
        print_success "Created tag: $NEW_VERSION"
    else
        print_warning "Tag $NEW_VERSION already exists"
    fi

    # Push the branch first.
    print_status "Pushing to remote repository..."
    if ! git push > /dev/null 2>&1; then
        print_error "Failed to push changes"
        exit 1
    fi
    print_success "Pushed changes"

    # Push only the new tag to avoid conflicts with existing tags;
    # fall back to a force push when the tag moved.
    if git push origin "$NEW_VERSION" > /dev/null 2>&1; then
        print_success "Pushed tag: $NEW_VERSION"
    else
        print_warning "Tag push failed, trying force push..."
        if ! git push --force origin "$NEW_VERSION" > /dev/null 2>&1; then
            print_error "Failed to push tag: $NEW_VERSION"
            exit 1
        fi
        print_success "Force-pushed updated tag: $NEW_VERSION"
    fi
}
||||
|
||||
# Same as git_commit_and_push but skips tag creation — the NEW_VERSION
# tag is assumed to exist already; it is still pushed at the end.
git_commit_and_push_no_tag() {
    print_status "Preparing git commit..."

    # Stage all changes; failure here is fatal.
    if ! git add . > /dev/null 2>&1; then
        print_error "Failed to stage changes"
        exit 1
    fi
    print_success "Staged all changes"

    # Commit only when something is actually staged.
    if git diff --staged --quiet; then
        print_warning "No changes to commit"
    else
        if ! git commit -m "$NEW_VERSION - $COMMIT_MESSAGE" > /dev/null 2>&1; then
            print_error "Failed to commit changes"
            exit 1
        fi
        print_success "Committed changes"
    fi

    # Push the branch first.
    print_status "Pushing to remote repository..."
    if ! git push > /dev/null 2>&1; then
        print_error "Failed to push changes"
        exit 1
    fi
    print_success "Pushed changes"

    # Push only the pre-existing tag; fall back to a force push.
    if git push origin "$NEW_VERSION" > /dev/null 2>&1; then
        print_success "Pushed tag: $NEW_VERSION"
    else
        print_warning "Tag push failed, trying force push..."
        if ! git push --force origin "$NEW_VERSION" > /dev/null 2>&1; then
            print_error "Failed to push tag: $NEW_VERSION"
            exit 1
        fi
        print_success "Force-pushed updated tag: $NEW_VERSION"
    fi
}
|
||||
# Create (or reuse) the Gitea release for NEW_VERSION and upload binaries.
# Soft-skips (return 0) when no API token exists in ~/.gitea_token.
# Returns 0 only when the release exists AND all asset uploads succeed.
create_gitea_release() {
    print_status "Creating Gitea release..."

    # Without a token we cannot talk to the API; treat as a soft skip.
    if [[ ! -f "$HOME/.gitea_token" ]]; then
        print_warning "No ~/.gitea_token found. Skipping release creation."
        print_warning "Create ~/.gitea_token with your Gitea access token to enable releases."
        return 0
    fi

    # Split declaration from assignment so a failing command substitution
    # is not masked by 'local' always returning 0 (shellcheck SC2155).
    local token
    token=$(tr -d '\n\r' < "$HOME/.gitea_token")
    local api_url="https://git.laantungir.net/api/v1/repos/laantungir/c-relay"

    # Create the release for the freshly pushed tag.
    print_status "Creating release $NEW_VERSION..."
    local response
    response=$(curl -s -X POST "$api_url/releases" \
        -H "Authorization: token $token" \
        -H "Content-Type: application/json" \
        -d "{\"tag_name\": \"$NEW_VERSION\", \"name\": \"$NEW_VERSION\", \"body\": \"$COMMIT_MESSAGE\"}")

    local upload_result=false

    if echo "$response" | grep -q '"id"'; then
        print_success "Created release $NEW_VERSION"
        if upload_release_binaries "$api_url" "$token"; then
            upload_result=true
        fi
    elif echo "$response" | grep -q "already exists"; then
        print_warning "Release $NEW_VERSION already exists"
        if upload_release_binaries "$api_url" "$token"; then
            upload_result=true
        fi
    else
        print_error "Failed to create release $NEW_VERSION"
        print_error "Response: $response"

        # The POST may have failed spuriously; check whether the release
        # actually exists before giving up.
        print_status "Checking if release exists..."
        local check_response
        check_response=$(curl -s -H "Authorization: token $token" "$api_url/releases/tags/$NEW_VERSION")
        if echo "$check_response" | grep -q '"id"'; then
            print_warning "Release exists but creation response was unexpected"
            if upload_release_binaries "$api_url" "$token"; then
                upload_result=true
            fi
        else
            print_error "Release does not exist and creation failed"
            return 1
        fi
    fi

    # Overall result mirrors the asset-upload outcome.
    if [[ "$upload_result" == true ]]; then
        return 0
    else
        print_error "Binary upload failed"
        return 1
    fi
}
|
||||
# Function to upload release binaries
# Fix: the original repeated the same ~15-line curl/check stanza four
# times; the shared per-file logic now lives in _upload_asset, with all
# user-visible messages preserved verbatim.

# Upload one asset file to the release identified by $release_id.
# Relies on bash dynamic scoping to read $api_url, $token, $release_id
# and $NEW_VERSION, and to flip the caller's $upload_success on failure.
#   $1 - local file name
#   $2 - asset name suffix (e.g. "x86_64", "arm64-static")
#   $3 - label used in status/success/error messages
#   $4 - label used in the "not found" warning (capitalization differs)
_upload_asset() {
    local file="$1"
    local suffix="$2"
    local label="$3"
    local missing_label="$4"

    # A missing file is only a warning — the build step already reported it.
    if [[ ! -f "$file" ]]; then
        print_warning "$missing_label binary not found: $file"
        return 0
    fi

    print_status "Uploading $label binary..."
    local upload_response
    upload_response=$(curl -s -w "\n%{http_code}" -X POST "$api_url/releases/$release_id/assets" \
        -H "Authorization: token $token" \
        -F "attachment=@${file};filename=c-relay-${NEW_VERSION}-linux-${suffix}")

    # The last line of the response is the HTTP status injected by -w above.
    local http_code response_body
    http_code=$(echo "$upload_response" | tail -n1)
    response_body=$(echo "$upload_response" | head -n -1)

    if [[ "$http_code" == "201" ]]; then
        print_success "Uploaded $label binary successfully"
    else
        print_error "Failed to upload $label binary (HTTP $http_code)"
        print_error "Response: $response_body"
        upload_success=false
    fi
}

# Upload all built binaries as assets of the NEW_VERSION release.
#   $1 - base API URL of the repository
#   $2 - Gitea access token
# Returns 0 when every present binary uploaded successfully.
upload_release_binaries() {
    local api_url="$1"
    local token="$2"
    local upload_success=true

    # Resolve the numeric release id for the tag.
    print_status "Getting release ID for $NEW_VERSION..."
    local response
    response=$(curl -s -H "Authorization: token $token" "$api_url/releases/tags/$NEW_VERSION")
    local release_id
    release_id=$(echo "$response" | grep -o '"id":[0-9]*' | head -n1 | cut -d: -f2)

    if [[ -z "$release_id" ]]; then
        print_error "Could not get release ID for $NEW_VERSION"
        print_error "API Response: $response"

        # List a few known releases to aid debugging.
        print_status "Available releases:"
        curl -s -H "Authorization: token $token" "$api_url/releases" | grep -o '"tag_name":"[^"]*"' | head -5
        return 1
    fi

    print_success "Found release ID: $release_id"

    # One call per artifact; a failed upload flips upload_success to false.
    _upload_asset "c-relay-x86_64"        "x86_64"        "x86_64"        "x86_64"
    _upload_asset "c-relay-arm64"         "arm64"         "ARM64"         "ARM64"
    _upload_asset "c-relay-static-x86_64" "x86_64-static" "static x86_64" "Static x86_64"
    _upload_asset "c-relay-static-arm64"  "arm64-static"  "static ARM64"  "Static ARM64"

    # Return success/failure status.
    if [[ "$upload_success" == true ]]; then
        return 0
    else
        return 1
    fi
}
||||
|
||||
# Function to clean up release binaries
|
||||
cleanup_release_binaries() {
|
||||
local force_cleanup="$1" # Optional parameter to force cleanup even on failure
|
||||
|
||||
if [[ "$force_cleanup" == "force" ]] || [[ "$upload_success" == true ]]; then
|
||||
if [[ -f "c-relay-x86_64" ]]; then
|
||||
rm -f c-relay-x86_64
|
||||
print_status "Cleaned up x86_64 binary"
|
||||
fi
|
||||
if [[ -f "c-relay-arm64" ]]; then
|
||||
rm -f c-relay-arm64
|
||||
print_status "Cleaned up ARM64 binary"
|
||||
fi
|
||||
if [[ -f "c-relay-static-x86_64" ]]; then
|
||||
rm -f c-relay-static-x86_64
|
||||
print_status "Cleaned up static x86_64 binary"
|
||||
fi
|
||||
if [[ -f "c-relay-static-arm64" ]]; then
|
||||
rm -f c-relay-static-arm64
|
||||
print_status "Cleaned up static ARM64 binary"
|
||||
fi
|
||||
else
|
||||
print_warning "Keeping binary files due to upload failures"
|
||||
print_status "Files available for manual upload:"
|
||||
if [[ -f "c-relay-x86_64" ]]; then
|
||||
print_status " - c-relay-x86_64"
|
||||
fi
|
||||
if [[ -f "c-relay-arm64" ]]; then
|
||||
print_status " - c-relay-arm64"
|
||||
fi
|
||||
if [[ -f "c-relay-static-x86_64" ]]; then
|
||||
print_status " - c-relay-static-x86_64"
|
||||
fi
|
||||
if [[ -f "c-relay-static-arm64" ]]; then
|
||||
print_status " - c-relay-static-arm64"
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
# Main execution
|
||||
main() {
|
||||
print_status "C-Relay Build and Push Script"
|
||||
|
||||
# Check prerequisites
|
||||
check_git_repo
|
||||
|
||||
if [[ "$RELEASE_MODE" == true ]]; then
|
||||
print_status "=== RELEASE MODE ==="
|
||||
|
||||
# Increment minor version for releases
|
||||
increment_version "minor"
|
||||
|
||||
# Create new git tag BEFORE compilation so version.h picks it up
|
||||
if git tag "$NEW_VERSION" > /dev/null 2>&1; then
|
||||
print_success "Created tag: $NEW_VERSION"
|
||||
else
|
||||
print_warning "Tag $NEW_VERSION already exists, removing and recreating..."
|
||||
git tag -d "$NEW_VERSION" > /dev/null 2>&1
|
||||
git tag "$NEW_VERSION" > /dev/null 2>&1
|
||||
fi
|
||||
|
||||
# Compile project first (will now pick up the new tag)
|
||||
compile_project
|
||||
|
||||
# Build release binaries
|
||||
build_release_binaries
|
||||
|
||||
# Commit and push (but skip tag creation since we already did it)
|
||||
git_commit_and_push_no_tag
|
||||
|
||||
# Create Gitea release with binaries
|
||||
if create_gitea_release; then
|
||||
print_success "Release $NEW_VERSION completed successfully!"
|
||||
print_status "Binaries uploaded to Gitea release"
|
||||
upload_success=true
|
||||
else
|
||||
print_error "Release creation or binary upload failed"
|
||||
upload_success=false
|
||||
fi
|
||||
|
||||
# Cleanup (only if upload was successful)
|
||||
cleanup_release_binaries
|
||||
|
||||
else
|
||||
print_status "=== DEFAULT MODE ==="
|
||||
|
||||
# Increment patch version for regular commits
|
||||
increment_version "patch"
|
||||
|
||||
# Create new git tag BEFORE compilation so version.h picks it up
|
||||
if git tag "$NEW_VERSION" > /dev/null 2>&1; then
|
||||
print_success "Created tag: $NEW_VERSION"
|
||||
else
|
||||
print_warning "Tag $NEW_VERSION already exists, removing and recreating..."
|
||||
git tag -d "$NEW_VERSION" > /dev/null 2>&1
|
||||
git tag "$NEW_VERSION" > /dev/null 2>&1
|
||||
fi
|
||||
|
||||
# Compile project (will now pick up the new tag)
|
||||
compile_project
|
||||
|
||||
# Commit and push (but skip tag creation since we already did it)
|
||||
git_commit_and_push_no_tag
|
||||
|
||||
print_success "Build and push completed successfully!"
|
||||
print_status "Version $NEW_VERSION pushed to repository"
|
||||
fi
|
||||
}
|
||||
|
||||
# Execute main function
|
||||
main
|
||||
@@ -9,11 +9,21 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
BUILD_DIR="$SCRIPT_DIR/build"
|
||||
DOCKERFILE="$SCRIPT_DIR/Dockerfile.alpine-musl"
|
||||
|
||||
echo "=========================================="
|
||||
echo "C-Relay MUSL Static Binary Builder"
|
||||
echo "=========================================="
|
||||
# Parse command line arguments
|
||||
DEBUG_BUILD=false
|
||||
if [[ "$1" == "--debug" ]]; then
|
||||
DEBUG_BUILD=true
|
||||
echo "=========================================="
|
||||
echo "C-Relay MUSL Static Binary Builder (DEBUG MODE)"
|
||||
echo "=========================================="
|
||||
else
|
||||
echo "=========================================="
|
||||
echo "C-Relay MUSL Static Binary Builder (PRODUCTION MODE)"
|
||||
echo "=========================================="
|
||||
fi
|
||||
echo "Project directory: $SCRIPT_DIR"
|
||||
echo "Build directory: $BUILD_DIR"
|
||||
echo "Debug build: $DEBUG_BUILD"
|
||||
echo ""
|
||||
|
||||
# Create build directory
|
||||
@@ -83,6 +93,7 @@ echo ""
|
||||
|
||||
$DOCKER_CMD build \
|
||||
--platform "$PLATFORM" \
|
||||
--build-arg DEBUG_BUILD=$DEBUG_BUILD \
|
||||
-f "$DOCKERFILE" \
|
||||
-t c-relay-musl-builder:latest \
|
||||
--progress=plain \
|
||||
@@ -105,6 +116,7 @@ echo "=========================================="
|
||||
# Build the builder stage to extract the binary
|
||||
$DOCKER_CMD build \
|
||||
--platform "$PLATFORM" \
|
||||
--build-arg DEBUG_BUILD=$DEBUG_BUILD \
|
||||
--target builder \
|
||||
-f "$DOCKERFILE" \
|
||||
-t c-relay-static-builder-stage:latest \
|
||||
@@ -179,11 +191,16 @@ echo "=========================================="
|
||||
echo "Binary: $BUILD_DIR/$OUTPUT_NAME"
|
||||
echo "Size: $(du -h "$BUILD_DIR/$OUTPUT_NAME" | cut -f1)"
|
||||
echo "Platform: $PLATFORM"
|
||||
if [ "$DEBUG_BUILD" = true ]; then
|
||||
echo "Build Type: DEBUG (with symbols, no optimization)"
|
||||
else
|
||||
echo "Build Type: PRODUCTION (optimized, stripped)"
|
||||
fi
|
||||
if [ "$TRULY_STATIC" = true ]; then
|
||||
echo "Type: Fully static binary (Alpine MUSL-based)"
|
||||
echo "Linkage: Fully static binary (Alpine MUSL-based)"
|
||||
echo "Portability: Works on ANY Linux distribution"
|
||||
else
|
||||
echo "Type: Static binary (may have minimal dependencies)"
|
||||
echo "Linkage: Static binary (may have minimal dependencies)"
|
||||
fi
|
||||
echo ""
|
||||
echo "✓ Build complete!"
|
||||
|
||||
1
c_utils_lib
Submodule
@@ -1,3 +1,19 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Copy the binary to the deployment location
|
||||
cp build/c_relay_x86 ~/Storage/c_relay/crelay
|
||||
|
||||
# Copy the service file to systemd (use the main service file)
|
||||
sudo cp systemd/c-relay.service /etc/systemd/system/c-relay-local.service
|
||||
|
||||
# Reload systemd daemon to pick up the new service
|
||||
sudo systemctl daemon-reload
|
||||
|
||||
# Enable the service (if not already enabled)
|
||||
sudo systemctl enable c-relay-local.service
|
||||
|
||||
# Restart the service
|
||||
sudo systemctl restart c-relay-local.service
|
||||
|
||||
# Show service status
|
||||
sudo systemctl status c-relay-local.service --no-pager -l
|
||||
|
||||
28
deploy_lt.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/bin/bash
|
||||
|
||||
# C-Relay Static Binary Deployment Script
|
||||
# Deploys build/c_relay_static_x86_64 to server via ssh
|
||||
|
||||
set -e
|
||||
|
||||
# Configuration
|
||||
LOCAL_BINARY="build/c_relay_static_x86_64"
|
||||
REMOTE_BINARY_PATH="/usr/local/bin/c_relay/c_relay"
|
||||
SERVICE_NAME="c-relay"
|
||||
|
||||
# Create backup
|
||||
ssh ubuntu@laantungir.com "sudo cp '$REMOTE_BINARY_PATH' '${REMOTE_BINARY_PATH}.backup.$(date +%Y%m%d_%H%M%S)'" 2>/dev/null || true
|
||||
|
||||
# Upload binary to temp location
|
||||
scp "$LOCAL_BINARY" "ubuntu@laantungir.com:/tmp/c_relay.tmp"
|
||||
|
||||
# Install binary
|
||||
ssh ubuntu@laantungir.com "sudo mv '/tmp/c_relay.tmp' '$REMOTE_BINARY_PATH'"
|
||||
ssh ubuntu@laantungir.com "sudo chown c-relay:c-relay '$REMOTE_BINARY_PATH'"
|
||||
ssh ubuntu@laantungir.com "sudo chmod +x '$REMOTE_BINARY_PATH'"
|
||||
|
||||
# Reload systemd and restart service
|
||||
ssh ubuntu@laantungir.com "sudo systemctl daemon-reload"
|
||||
ssh ubuntu@laantungir.com "sudo systemctl restart '$SERVICE_NAME'"
|
||||
|
||||
echo "Deployment complete!"
|
||||
@@ -1,70 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# C-Relay Static Binary Deployment Script
|
||||
# Deploys build/c_relay_static_x86_64 to server via sshlt
|
||||
# Usage: ./deploy_static.sh [--debug-level=N] [-d=N]
|
||||
|
||||
set -e
|
||||
|
||||
# Configuration
|
||||
LOCAL_BINARY="build/c_relay_static_x86_64"
|
||||
REMOTE_BINARY_PATH="/usr/local/bin/c_relay/c_relay"
|
||||
SERVICE_NAME="c-relay"
|
||||
|
||||
# Default debug level
|
||||
DEBUG_LEVEL=0
|
||||
|
||||
# Parse command line arguments
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case $1 in
|
||||
--debug-level=*)
|
||||
DEBUG_LEVEL="${1#*=}"
|
||||
shift
|
||||
;;
|
||||
-d=*)
|
||||
DEBUG_LEVEL="${1#*=}"
|
||||
shift
|
||||
;;
|
||||
--debug-level)
|
||||
DEBUG_LEVEL="$2"
|
||||
shift 2
|
||||
;;
|
||||
-d)
|
||||
DEBUG_LEVEL="$2"
|
||||
shift 2
|
||||
;;
|
||||
*)
|
||||
echo "Unknown option: $1"
|
||||
echo "Usage: $0 [--debug-level=N] [-d=N]"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
# Validate debug level
|
||||
if ! [[ "$DEBUG_LEVEL" =~ ^[0-5]$ ]]; then
|
||||
echo "Error: Debug level must be 0-5, got: $DEBUG_LEVEL"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Deploying with debug level: $DEBUG_LEVEL"
|
||||
|
||||
# Create backup
|
||||
ssh ubuntu@laantungir.com "sudo cp '$REMOTE_BINARY_PATH' '${REMOTE_BINARY_PATH}.backup.$(date +%Y%m%d_%H%M%S)'" 2>/dev/null || true
|
||||
|
||||
# Upload binary to temp location
|
||||
scp "$LOCAL_BINARY" "ubuntu@laantungir.com:/tmp/c_relay.tmp"
|
||||
|
||||
# Install binary
|
||||
ssh ubuntu@laantungir.com "sudo mv '/tmp/c_relay.tmp' '$REMOTE_BINARY_PATH'"
|
||||
ssh ubuntu@laantungir.com "sudo chown c-relay:c-relay '$REMOTE_BINARY_PATH'"
|
||||
ssh ubuntu@laantungir.com "sudo chmod +x '$REMOTE_BINARY_PATH'"
|
||||
|
||||
# Update systemd service environment variable
|
||||
ssh ubuntu@laantungir.com "sudo sed -i 's/Environment=DEBUG_LEVEL=.*/Environment=DEBUG_LEVEL=$DEBUG_LEVEL/' /etc/systemd/system/c-relay.service"
|
||||
|
||||
# Reload systemd and restart service
|
||||
ssh ubuntu@laantungir.com "sudo systemctl daemon-reload"
|
||||
ssh ubuntu@laantungir.com "sudo systemctl restart '$SERVICE_NAME'"
|
||||
|
||||
echo "Deployment complete!"
|
||||
457
docs/c_utils_lib_architecture.md
Normal file
@@ -0,0 +1,457 @@
|
||||
# c_utils_lib Architecture Plan
|
||||
|
||||
## Overview
|
||||
|
||||
`c_utils_lib` is a standalone C utility library designed to provide reusable, general-purpose functions for C projects. It serves as a learning repository and a practical toolkit for common C programming tasks.
|
||||
|
||||
## Design Philosophy
|
||||
|
||||
1. **Zero External Dependencies**: Only standard C library dependencies
|
||||
2. **Modular Design**: Each utility is independent and can be used separately
|
||||
3. **Learning-Oriented**: Well-documented code suitable for learning C
|
||||
4. **Production-Ready**: Battle-tested utilities from real projects
|
||||
5. **Cross-Platform**: Works on Linux, macOS, and other POSIX systems
|
||||
|
||||
## Repository Structure
|
||||
|
||||
```
|
||||
c_utils_lib/
|
||||
├── README.md # Main documentation
|
||||
├── LICENSE # MIT License
|
||||
├── VERSION # Current version (e.g., v0.1.0)
|
||||
├── build.sh # Build script
|
||||
├── Makefile # Build system
|
||||
├── .gitignore # Git ignore rules
|
||||
│
|
||||
├── include/ # Public headers
|
||||
│ ├── c_utils.h # Main header (includes all utilities)
|
||||
│ ├── debug.h # Debug/logging system
|
||||
│ ├── version.h # Version utilities
|
||||
│ ├── string_utils.h # String utilities (future)
|
||||
│ └── memory_utils.h # Memory utilities (future)
|
||||
│
|
||||
├── src/ # Implementation files
|
||||
│ ├── debug.c # Debug system implementation
|
||||
│ ├── version.c # Version utilities implementation
|
||||
│ ├── string_utils.c # String utilities (future)
|
||||
│ └── memory_utils.c # Memory utilities (future)
|
||||
│
|
||||
├── examples/ # Usage examples
|
||||
│ ├── debug_example.c # Debug system example
|
||||
│ ├── version_example.c # Version utilities example
|
||||
│ └── Makefile # Examples build system
|
||||
│
|
||||
├── tests/ # Unit tests
|
||||
│ ├── test_debug.c # Debug system tests
|
||||
│ ├── test_version.c # Version utilities tests
|
||||
│ ├── run_tests.sh # Test runner
|
||||
│ └── Makefile # Tests build system
|
||||
│
|
||||
└── docs/ # Additional documentation
|
||||
├── API.md # Complete API reference
|
||||
├── INTEGRATION.md # How to integrate into projects
|
||||
├── VERSIONING.md # Versioning system guide
|
||||
└── CONTRIBUTING.md # Contribution guidelines
|
||||
```
|
||||
|
||||
## Initial Utilities (v0.1.0)
|
||||
|
||||
### 1. Debug System (`debug.h`, `debug.c`)
|
||||
|
||||
**Purpose**: Unified logging and debugging system with configurable verbosity levels.
|
||||
|
||||
**Features**:
|
||||
- 5 debug levels: NONE, ERROR, WARN, INFO, DEBUG, TRACE
|
||||
- Timestamp formatting
|
||||
- File/line information at TRACE level
|
||||
- Macro-based API for zero-cost when disabled
|
||||
- Thread-safe (future enhancement)
|
||||
|
||||
**API**:
|
||||
```c
|
||||
// Initialization
|
||||
void debug_init(int level);
|
||||
|
||||
// Logging macros
|
||||
DEBUG_ERROR(format, ...);
|
||||
DEBUG_WARN(format, ...);
|
||||
DEBUG_INFO(format, ...);
|
||||
DEBUG_LOG(format, ...);
|
||||
DEBUG_TRACE(format, ...);
|
||||
|
||||
// Global debug level
|
||||
extern debug_level_t g_debug_level;
|
||||
```
|
||||
|
||||
**Usage Example**:
|
||||
```c
|
||||
#include <c_utils/debug.h>
|
||||
|
||||
int main() {
|
||||
debug_init(DEBUG_LEVEL_INFO);
|
||||
DEBUG_INFO("Application started");
|
||||
DEBUG_ERROR("Critical error: %s", error_msg);
|
||||
return 0;
|
||||
}
|
||||
```
|
||||
|
||||
### 2. Version Utilities (`version.h`, `version.c`)
|
||||
|
||||
**Purpose**: Reusable versioning system for C projects using git tags.
|
||||
|
||||
**Features**:
|
||||
- Automatic version extraction from git tags
|
||||
- Semantic versioning support (MAJOR.MINOR.PATCH)
|
||||
- Version comparison functions
|
||||
- Header file generation for embedding version info
|
||||
- Build number tracking
|
||||
|
||||
**API**:
|
||||
```c
|
||||
// Version structure
|
||||
typedef struct {
|
||||
int major;
|
||||
int minor;
|
||||
int patch;
|
||||
char* git_hash;
|
||||
char* build_date;
|
||||
} version_info_t;
|
||||
|
||||
// Get version from git
|
||||
int version_get_from_git(version_info_t* version);
|
||||
|
||||
// Generate version header file
|
||||
int version_generate_header(const char* output_path, const char* prefix);
|
||||
|
||||
// Compare versions
|
||||
int version_compare(version_info_t* v1, version_info_t* v2);
|
||||
|
||||
// Format version string
|
||||
char* version_to_string(version_info_t* version);
|
||||
```
|
||||
|
||||
**Usage Example**:
|
||||
```c
|
||||
#include <c_utils/version.h>
|
||||
|
||||
// In your build system:
|
||||
version_generate_header("src/version.h", "MY_APP");
|
||||
|
||||
// In your code:
|
||||
#include "version.h"
|
||||
printf("Version: %s\n", MY_APP_VERSION);
|
||||
```
|
||||
|
||||
**Integration with Projects**:
|
||||
```bash
|
||||
# In project Makefile
|
||||
version.h:
|
||||
c_utils_lib/bin/generate_version src/version.h MY_PROJECT
|
||||
```
|
||||
|
||||
## Build System
|
||||
|
||||
### Static Library Output
|
||||
|
||||
```
|
||||
libc_utils.a # Static library for linking
|
||||
```
|
||||
|
||||
### Build Targets
|
||||
|
||||
```bash
|
||||
make # Build static library
|
||||
make examples # Build examples
|
||||
make test # Run tests
|
||||
make install # Install to system (optional)
|
||||
make clean # Clean build artifacts
|
||||
```
|
||||
|
||||
### Build Script (`build.sh`)
|
||||
|
||||
```bash
|
||||
#!/bin/bash
|
||||
# Simplified build script similar to nostr_core_lib
|
||||
|
||||
case "$1" in
|
||||
lib|"")
|
||||
make
|
||||
;;
|
||||
examples)
|
||||
make examples
|
||||
;;
|
||||
test)
|
||||
make test
|
||||
;;
|
||||
clean)
|
||||
make clean
|
||||
;;
|
||||
install)
|
||||
make install
|
||||
;;
|
||||
*)
|
||||
echo "Usage: ./build.sh [lib|examples|test|clean|install]"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
```
|
||||
|
||||
## Versioning System Design
|
||||
|
||||
### How It Works
|
||||
|
||||
1. **Git Tags as Source of Truth**
|
||||
- Version tags: `v0.1.0`, `v0.2.0`, etc.
|
||||
- Follows semantic versioning
|
||||
|
||||
2. **Automatic Header Generation**
|
||||
- Script reads git tags
|
||||
- Generates header with version macros
|
||||
- Includes build date and git hash
|
||||
|
||||
3. **Reusable Across Projects**
|
||||
- Each project calls `version_generate_header()`
|
||||
- Customizable prefix (e.g., `C_RELAY_VERSION`, `NOSTR_CORE_VERSION`)
|
||||
- No hardcoded version numbers in source
|
||||
|
||||
### Example Generated Header
|
||||
|
||||
```c
|
||||
// Auto-generated by c_utils_lib version system
|
||||
#ifndef MY_PROJECT_VERSION_H
|
||||
#define MY_PROJECT_VERSION_H
|
||||
|
||||
#define MY_PROJECT_VERSION "v0.1.0"
|
||||
#define MY_PROJECT_VERSION_MAJOR 0
|
||||
#define MY_PROJECT_VERSION_MINOR 1
|
||||
#define MY_PROJECT_VERSION_PATCH 0
|
||||
#define MY_PROJECT_GIT_HASH "a1b2c3d"
|
||||
#define MY_PROJECT_BUILD_DATE "2025-10-15"
|
||||
|
||||
#endif
|
||||
```
|
||||
|
||||
### Integration Pattern
|
||||
|
||||
```makefile
|
||||
# In consuming project's Makefile
|
||||
VERSION_SCRIPT = c_utils_lib/bin/generate_version
|
||||
|
||||
src/version.h: .git/refs/tags/*
|
||||
$(VERSION_SCRIPT) src/version.h MY_PROJECT
|
||||
|
||||
my_app: src/version.h src/main.c
|
||||
$(CC) src/main.c -o my_app -Ic_utils_lib/include -Lc_utils_lib -lc_utils
|
||||
```
|
||||
|
||||
## Future Utilities (Roadmap)
|
||||
|
||||
### String Utilities (`string_utils.h`)
|
||||
- Safe string operations (bounds checking)
|
||||
- String trimming, splitting, joining
|
||||
- Case conversion
|
||||
- Pattern matching helpers
|
||||
|
||||
### Memory Utilities (`memory_utils.h`)
|
||||
- Safe allocation wrappers
|
||||
- Memory pool management
|
||||
- Leak detection helpers (debug builds)
|
||||
- Arena allocators
|
||||
|
||||
### Configuration Utilities (`config_utils.h`)
|
||||
- INI file parsing
|
||||
- JSON configuration (using cJSON)
|
||||
- Environment variable helpers
|
||||
- Command-line argument parsing
|
||||
|
||||
### File Utilities (`file_utils.h`)
|
||||
- Safe file operations
|
||||
- Directory traversal
|
||||
- Path manipulation
|
||||
- File watching (inotify wrapper)
|
||||
|
||||
### Time Utilities (`time_utils.h`)
|
||||
- Timestamp formatting
|
||||
- Duration calculations
|
||||
- Timer utilities
|
||||
- Rate limiting helpers
|
||||
|
||||
## Integration Guide
|
||||
|
||||
### As Git Submodule
|
||||
|
||||
```bash
|
||||
# In your project
|
||||
git submodule add https://github.com/yourusername/c_utils_lib.git
|
||||
git submodule update --init --recursive
|
||||
|
||||
# Build the library
|
||||
cd c_utils_lib && ./build.sh lib && cd ..
|
||||
|
||||
# Update your Makefile
|
||||
INCLUDES += -Ic_utils_lib/include
|
||||
LIBS += -Lc_utils_lib -lc_utils
|
||||
```
|
||||
|
||||
### In Your Makefile
|
||||
|
||||
```makefile
|
||||
# Check if c_utils_lib is built
|
||||
c_utils_lib/libc_utils.a:
|
||||
cd c_utils_lib && ./build.sh lib
|
||||
|
||||
# Link against it
|
||||
my_app: c_utils_lib/libc_utils.a src/main.c
|
||||
$(CC) src/main.c -o my_app \
|
||||
-Ic_utils_lib/include \
|
||||
-Lc_utils_lib -lc_utils
|
||||
```
|
||||
|
||||
### In Your Code
|
||||
|
||||
```c
|
||||
// Option 1: Include everything
|
||||
#include <c_utils/c_utils.h>
|
||||
|
||||
// Option 2: Include specific utilities
|
||||
#include <c_utils/debug.h>
|
||||
#include <c_utils/version.h>
|
||||
|
||||
int main() {
|
||||
debug_init(DEBUG_LEVEL_INFO);
|
||||
DEBUG_INFO("Starting application version %s", MY_APP_VERSION);
|
||||
return 0;
|
||||
}
|
||||
```
|
||||
|
||||
## Migration Plan for c-relay
|
||||
|
||||
### Phase 1: Extract Debug System
|
||||
1. Create `c_utils_lib` repository
|
||||
2. Move [`debug.c`](../src/debug.c) and [`debug.h`](../src/debug.h)
|
||||
3. Create build system
|
||||
4. Add basic tests
|
||||
|
||||
### Phase 2: Add Versioning System
|
||||
1. Extract version generation logic from c-relay
|
||||
2. Create reusable version utilities
|
||||
3. Update c-relay to use new system
|
||||
4. Update nostr_core_lib to use new system
|
||||
|
||||
### Phase 3: Add as Submodule
|
||||
1. Add `c_utils_lib` as submodule to c-relay
|
||||
2. Update c-relay Makefile
|
||||
3. Update includes in c-relay source files
|
||||
4. Remove old debug files from c-relay
|
||||
|
||||
### Phase 4: Documentation & Examples
|
||||
1. Create comprehensive README
|
||||
2. Add usage examples
|
||||
3. Write integration guide
|
||||
4. Document API
|
||||
|
||||
## Benefits
|
||||
|
||||
### For c-relay
|
||||
- Cleaner separation of concerns
|
||||
- Reusable utilities across projects
|
||||
- Easier to maintain and test
|
||||
- Consistent logging across codebase
|
||||
|
||||
### For Learning C
|
||||
- Real-world utility implementations
|
||||
- Best practices examples
|
||||
- Modular design patterns
|
||||
- Build system examples
|
||||
|
||||
### For Future Projects
|
||||
- Drop-in utility library
|
||||
- Proven, tested code
|
||||
- Consistent patterns
|
||||
- Time savings
|
||||
|
||||
## Testing Strategy
|
||||
|
||||
### Unit Tests
|
||||
- Test each utility independently
|
||||
- Mock external dependencies
|
||||
- Edge case coverage
|
||||
- Memory leak detection (valgrind)
|
||||
|
||||
### Integration Tests
|
||||
- Test with real projects (c-relay, nostr_core_lib)
|
||||
- Cross-platform testing
|
||||
- Performance benchmarks
|
||||
|
||||
### Continuous Integration
|
||||
- GitHub Actions for automated testing
|
||||
- Multiple compiler versions (gcc, clang)
|
||||
- Multiple platforms (Linux, macOS)
|
||||
- Static analysis (cppcheck, clang-tidy)
|
||||
|
||||
## Documentation Standards
|
||||
|
||||
### Code Documentation
|
||||
- Doxygen-style comments
|
||||
- Function purpose and parameters
|
||||
- Return value descriptions
|
||||
- Usage examples in comments
|
||||
|
||||
### API Documentation
|
||||
- Complete API reference in `docs/API.md`
|
||||
- Usage examples for each function
|
||||
- Common patterns and best practices
|
||||
- Migration guides
|
||||
|
||||
### Learning Resources
|
||||
- Detailed explanations of implementations
|
||||
- Links to relevant C standards
|
||||
- Common pitfalls and how to avoid them
|
||||
- Performance considerations
|
||||
|
||||
## License
|
||||
|
||||
MIT License - permissive and suitable for learning and commercial use.
|
||||
|
||||
## Version History
|
||||
|
||||
- **v0.1.0** (Planned)
|
||||
- Initial release
|
||||
- Debug system
|
||||
- Version utilities
|
||||
- Basic documentation
|
||||
|
||||
- **v0.2.0** (Future)
|
||||
- String utilities
|
||||
- Memory utilities
|
||||
- Enhanced documentation
|
||||
|
||||
- **v0.3.0** (Future)
|
||||
- Configuration utilities
|
||||
- File utilities
|
||||
- Time utilities
|
||||
|
||||
## Success Criteria
|
||||
|
||||
1. ✅ Successfully integrated into c-relay
|
||||
2. ✅ Successfully integrated into nostr_core_lib
|
||||
3. ✅ All tests passing
|
||||
4. ✅ Documentation complete
|
||||
5. ✅ Examples working
|
||||
6. ✅ Zero external dependencies (except standard library)
|
||||
7. ✅ Cross-platform compatibility verified
|
||||
|
||||
## Next Steps
|
||||
|
||||
1. Create repository structure
|
||||
2. Implement debug system
|
||||
3. Implement version utilities
|
||||
4. Create build system
|
||||
5. Write tests
|
||||
6. Create documentation
|
||||
7. Integrate into c-relay
|
||||
8. Publish to GitHub
|
||||
|
||||
---
|
||||
|
||||
**Note**: This is a living document. Update as the library evolves and new utilities are added.
|
||||
621
docs/c_utils_lib_implementation_plan.md
Normal file
@@ -0,0 +1,621 @@
|
||||
# c_utils_lib Implementation Plan
|
||||
|
||||
## Overview
|
||||
|
||||
This document provides a step-by-step implementation plan for creating the `c_utils_lib` library and integrating it into the c-relay project.
|
||||
|
||||
## Phase 1: Repository Setup & Structure
|
||||
|
||||
### Step 1.1: Create Repository Structure
|
||||
|
||||
**Location**: Create outside c-relay project (sibling directory)
|
||||
|
||||
```bash
|
||||
# Create directory structure
|
||||
mkdir -p c_utils_lib/{include,src,examples,tests,docs,bin}
|
||||
cd c_utils_lib
|
||||
|
||||
# Create subdirectories
|
||||
mkdir -p include/c_utils
|
||||
mkdir -p tests/results
|
||||
```
|
||||
|
||||
### Step 1.2: Initialize Git Repository
|
||||
|
||||
```bash
|
||||
cd c_utils_lib
|
||||
git init
|
||||
git branch -M main
|
||||
```
|
||||
|
||||
### Step 1.3: Create Core Files
|
||||
|
||||
**Files to create**:
|
||||
1. `README.md` - Main documentation
|
||||
2. `LICENSE` - MIT License
|
||||
3. `VERSION` - Version file (v0.1.0)
|
||||
4. `.gitignore` - Git ignore rules
|
||||
5. `Makefile` - Build system
|
||||
6. `build.sh` - Build script
|
||||
|
||||
## Phase 2: Debug System Implementation
|
||||
|
||||
### Step 2.1: Move Debug Files
|
||||
|
||||
**Source files** (from c-relay):
|
||||
- `src/debug.c` → `c_utils_lib/src/debug.c`
|
||||
- `src/debug.h` → `c_utils_lib/include/c_utils/debug.h`
|
||||
|
||||
**Modifications needed**:
|
||||
1. Update header guard in `debug.h`:
|
||||
```c
|
||||
#ifndef C_UTILS_DEBUG_H
|
||||
#define C_UTILS_DEBUG_H
|
||||
```
|
||||
|
||||
2. No namespace changes needed (keep simple API)
|
||||
|
||||
3. Add header documentation:
|
||||
```c
|
||||
/**
|
||||
* @file debug.h
|
||||
* @brief Debug and logging system with configurable verbosity levels
|
||||
*
|
||||
* Provides a simple, efficient logging system with 5 levels:
|
||||
* - ERROR: Critical errors
|
||||
* - WARN: Warnings
|
||||
* - INFO: Informational messages
|
||||
* - DEBUG: Debug messages
|
||||
* - TRACE: Detailed trace with file:line info
|
||||
*/
|
||||
```
|
||||
|
||||
### Step 2.2: Create Main Header
|
||||
|
||||
**File**: `include/c_utils/c_utils.h`
|
||||
|
||||
```c
|
||||
#ifndef C_UTILS_H
|
||||
#define C_UTILS_H
|
||||
|
||||
/**
|
||||
* @file c_utils.h
|
||||
* @brief Main header for c_utils_lib - includes all utilities
|
||||
*
|
||||
* Include this header to access all c_utils_lib functionality.
|
||||
* Alternatively, include specific headers for modular usage.
|
||||
*/
|
||||
|
||||
// Version information
|
||||
#define C_UTILS_VERSION "v0.1.0"
|
||||
#define C_UTILS_VERSION_MAJOR 0
|
||||
#define C_UTILS_VERSION_MINOR 1
|
||||
#define C_UTILS_VERSION_PATCH 0
|
||||
|
||||
// Include all utilities
|
||||
#include "debug.h"
|
||||
#include "version.h"
|
||||
|
||||
#endif /* C_UTILS_H */
|
||||
```
|
||||
|
||||
## Phase 3: Version Utilities Implementation
|
||||
|
||||
### Step 3.1: Design Version API
|
||||
|
||||
**File**: `include/c_utils/version.h`
|
||||
|
||||
```c
|
||||
#ifndef C_UTILS_VERSION_H
|
||||
#define C_UTILS_VERSION_H
|
||||
|
||||
#include <time.h>
|
||||
|
||||
/**
|
||||
* @brief Version information structure
|
||||
*/
|
||||
typedef struct {
|
||||
int major;
|
||||
int minor;
|
||||
int patch;
|
||||
char git_hash[41]; // SHA-1 hash (40 chars + null)
|
||||
char build_date[32]; // ISO 8601 format
|
||||
char version_string[64]; // "vX.Y.Z" format
|
||||
} version_info_t;
|
||||
|
||||
/**
|
||||
* @brief Extract version from git tags
|
||||
* @param version Output version structure
|
||||
* @return 0 on success, -1 on error
|
||||
*/
|
||||
int version_get_from_git(version_info_t* version);
|
||||
|
||||
/**
|
||||
* @brief Generate version header file for a project
|
||||
* @param output_path Path to output header file
|
||||
* @param prefix Prefix for macros (e.g., "MY_APP")
|
||||
* @return 0 on success, -1 on error
|
||||
*/
|
||||
int version_generate_header(const char* output_path, const char* prefix);
|
||||
|
||||
/**
|
||||
* @brief Compare two versions
|
||||
* @return -1 if v1 < v2, 0 if equal, 1 if v1 > v2
|
||||
*/
|
||||
int version_compare(const version_info_t* v1, const version_info_t* v2);
|
||||
|
||||
/**
|
||||
* @brief Format version as string
|
||||
* @param version Version structure
|
||||
* @param buffer Output buffer
|
||||
* @param buffer_size Size of output buffer
|
||||
* @return Number of characters written
|
||||
*/
|
||||
int version_to_string(const version_info_t* version, char* buffer, size_t buffer_size);
|
||||
|
||||
#endif /* C_UTILS_VERSION_H */
|
||||
```
|
||||
|
||||
### Step 3.2: Implement Version Utilities
|
||||
|
||||
**File**: `src/version.c`
|
||||
|
||||
Key functions to implement:
|
||||
1. `version_get_from_git()` - Execute `git describe --tags` and parse
|
||||
2. `version_generate_header()` - Generate header file with macros
|
||||
3. `version_compare()` - Semantic version comparison
|
||||
4. `version_to_string()` - Format version string
|
||||
|
||||
### Step 3.3: Create Version Generation Script
|
||||
|
||||
**File**: `bin/generate_version`
|
||||
|
||||
```bash
|
||||
#!/bin/bash
|
||||
# Generate version header for a project
|
||||
|
||||
OUTPUT_FILE="$1"
|
||||
PREFIX="$2"
|
||||
|
||||
if [ -z "$OUTPUT_FILE" ] || [ -z "$PREFIX" ]; then
|
||||
echo "Usage: $0 <output_file> <prefix>"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Get version from git
|
||||
if [ -d .git ]; then
|
||||
VERSION=$(git describe --tags --always 2>/dev/null || echo "v0.0.0")
|
||||
GIT_HASH=$(git rev-parse --short HEAD 2>/dev/null || echo "unknown")
|
||||
else
|
||||
VERSION="v0.0.0"
|
||||
GIT_HASH="unknown"
|
||||
fi
|
||||
|
||||
# Parse version
|
||||
CLEAN_VERSION=$(echo "$VERSION" | sed 's/^v//' | cut -d- -f1)
|
||||
MAJOR=$(echo "$CLEAN_VERSION" | cut -d. -f1)
|
||||
MINOR=$(echo "$CLEAN_VERSION" | cut -d. -f2)
|
||||
PATCH=$(echo "$CLEAN_VERSION" | cut -d. -f3)
|
||||
BUILD_DATE=$(date -u +"%Y-%m-%d %H:%M:%S UTC")
|
||||
|
||||
# Generate header
|
||||
cat > "$OUTPUT_FILE" << EOF
|
||||
/* Auto-generated by c_utils_lib version system */
|
||||
/* DO NOT EDIT - This file is automatically generated */
|
||||
|
||||
#ifndef ${PREFIX}_VERSION_H
|
||||
#define ${PREFIX}_VERSION_H
|
||||
|
||||
#define ${PREFIX}_VERSION "v${CLEAN_VERSION}"
|
||||
#define ${PREFIX}_VERSION_MAJOR ${MAJOR}
|
||||
#define ${PREFIX}_VERSION_MINOR ${MINOR}
|
||||
#define ${PREFIX}_VERSION_PATCH ${PATCH}
|
||||
#define ${PREFIX}_GIT_HASH "${GIT_HASH}"
|
||||
#define ${PREFIX}_BUILD_DATE "${BUILD_DATE}"
|
||||
|
||||
#endif /* ${PREFIX}_VERSION_H */
|
||||
EOF
|
||||
|
||||
echo "Generated $OUTPUT_FILE with version v${CLEAN_VERSION}"
|
||||
```
|
||||
|
||||
## Phase 4: Build System
|
||||
|
||||
### Step 4.1: Create Makefile
|
||||
|
||||
**File**: `Makefile`
|
||||
|
||||
```makefile
|
||||
# c_utils_lib Makefile
|
||||
|
||||
CC = gcc
|
||||
AR = ar
|
||||
CFLAGS = -Wall -Wextra -std=c99 -O2 -g
|
||||
INCLUDES = -Iinclude
|
||||
|
||||
# Directories
|
||||
SRC_DIR = src
|
||||
INCLUDE_DIR = include
|
||||
BUILD_DIR = build
|
||||
EXAMPLES_DIR = examples
|
||||
TESTS_DIR = tests
|
||||
|
||||
# Source files
|
||||
SOURCES = $(wildcard $(SRC_DIR)/*.c)
|
||||
OBJECTS = $(SOURCES:$(SRC_DIR)/%.c=$(BUILD_DIR)/%.o)
|
||||
|
||||
# Output library
|
||||
LIBRARY = libc_utils.a
|
||||
|
||||
# Default target
|
||||
all: $(LIBRARY)
|
||||
|
||||
# Create build directory
|
||||
$(BUILD_DIR):
|
||||
mkdir -p $(BUILD_DIR)
|
||||
|
||||
# Compile source files
|
||||
$(BUILD_DIR)/%.o: $(SRC_DIR)/%.c | $(BUILD_DIR)
|
||||
$(CC) $(CFLAGS) $(INCLUDES) -c $< -o $@
|
||||
|
||||
# Create static library
|
||||
$(LIBRARY): $(OBJECTS)
|
||||
$(AR) rcs $@ $^
|
||||
@echo "Built $(LIBRARY)"
|
||||
|
||||
# Build examples
|
||||
examples: $(LIBRARY)
|
||||
$(MAKE) -C $(EXAMPLES_DIR)
|
||||
|
||||
# Run tests
|
||||
test: $(LIBRARY)
|
||||
$(MAKE) -C $(TESTS_DIR)
|
||||
$(TESTS_DIR)/run_tests.sh
|
||||
|
||||
# Install to system (optional)
|
||||
install: $(LIBRARY)
|
||||
install -d /usr/local/lib
|
||||
install -m 644 $(LIBRARY) /usr/local/lib/
|
||||
install -d /usr/local/include/c_utils
|
||||
install -m 644 $(INCLUDE_DIR)/c_utils/*.h /usr/local/include/c_utils/
|
||||
@echo "Installed to /usr/local"
|
||||
|
||||
# Uninstall from system
|
||||
uninstall:
|
||||
rm -f /usr/local/lib/$(LIBRARY)
|
||||
rm -rf /usr/local/include/c_utils
|
||||
@echo "Uninstalled from /usr/local"
|
||||
|
||||
# Clean build artifacts
|
||||
clean:
|
||||
rm -rf $(BUILD_DIR) $(LIBRARY)
|
||||
$(MAKE) -C $(EXAMPLES_DIR) clean 2>/dev/null || true
|
||||
$(MAKE) -C $(TESTS_DIR) clean 2>/dev/null || true
|
||||
|
||||
# Help
|
||||
help:
|
||||
@echo "c_utils_lib Build System"
|
||||
@echo ""
|
||||
@echo "Targets:"
|
||||
@echo " all Build static library (default)"
|
||||
@echo " examples Build examples"
|
||||
@echo " test Run tests"
|
||||
@echo " install Install to /usr/local"
|
||||
@echo " uninstall Remove from /usr/local"
|
||||
@echo " clean Clean build artifacts"
|
||||
@echo " help Show this help"
|
||||
|
||||
.PHONY: all examples test install uninstall clean help
|
||||
```
|
||||
|
||||
### Step 4.2: Create Build Script
|
||||
|
||||
**File**: `build.sh`
|
||||
|
||||
```bash
|
||||
#!/bin/bash
|
||||
# c_utils_lib build script
|
||||
|
||||
set -e
|
||||
|
||||
case "$1" in
|
||||
lib|"")
|
||||
echo "Building c_utils_lib..."
|
||||
make
|
||||
;;
|
||||
examples)
|
||||
echo "Building examples..."
|
||||
make examples
|
||||
;;
|
||||
test)
|
||||
echo "Running tests..."
|
||||
make test
|
||||
;;
|
||||
clean)
|
||||
echo "Cleaning..."
|
||||
make clean
|
||||
;;
|
||||
install)
|
||||
echo "Installing..."
|
||||
make install
|
||||
;;
|
||||
*)
|
||||
echo "Usage: ./build.sh [lib|examples|test|clean|install]"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
echo "Done!"
|
||||
```
|
||||
|
||||
## Phase 5: Examples & Tests
|
||||
|
||||
### Step 5.1: Create Debug Example
|
||||
|
||||
**File**: `examples/debug_example.c`
|
||||
|
||||
```c
|
||||
#include <c_utils/debug.h>
|
||||
|
||||
int main() {
|
||||
// Initialize with INFO level
|
||||
debug_init(DEBUG_LEVEL_INFO);
|
||||
|
||||
DEBUG_INFO("Application started");
|
||||
DEBUG_WARN("This is a warning");
|
||||
DEBUG_ERROR("This is an error");
|
||||
|
||||
// This won't print (level too high)
|
||||
DEBUG_LOG("This debug message won't show");
|
||||
|
||||
// Change level to DEBUG
|
||||
g_debug_level = DEBUG_LEVEL_DEBUG;
|
||||
DEBUG_LOG("Now debug messages show");
|
||||
|
||||
// Change to TRACE to see file:line info
|
||||
g_debug_level = DEBUG_LEVEL_TRACE;
|
||||
DEBUG_TRACE("Trace with file:line information");
|
||||
|
||||
return 0;
|
||||
}
|
||||
```
|
||||
|
||||
### Step 5.2: Create Version Example
|
||||
|
||||
**File**: `examples/version_example.c`
|
||||
|
||||
```c
|
||||
#include <c_utils/version.h>
|
||||
#include <stdio.h>
|
||||
|
||||
int main() {
|
||||
version_info_t version;
|
||||
|
||||
// Get version from git
|
||||
if (version_get_from_git(&version) == 0) {
|
||||
char version_str[64];
|
||||
version_to_string(&version, version_str, sizeof(version_str));
|
||||
|
||||
printf("Version: %s\n", version_str);
|
||||
printf("Git Hash: %s\n", version.git_hash);
|
||||
printf("Build Date: %s\n", version.build_date);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
```
|
||||
|
||||
### Step 5.3: Create Test Suite
|
||||
|
||||
**File**: `tests/test_debug.c`
|
||||
|
||||
```c
|
||||
#include <c_utils/debug.h>
|
||||
#include <stdio.h>
|
||||
#include <string.h>
|
||||
|
||||
int test_debug_init() {
|
||||
debug_init(DEBUG_LEVEL_INFO);
|
||||
return (g_debug_level == DEBUG_LEVEL_INFO) ? 0 : -1;
|
||||
}
|
||||
|
||||
int test_debug_levels() {
|
||||
// Test that higher levels don't print at lower settings
|
||||
debug_init(DEBUG_LEVEL_ERROR);
|
||||
// Would need to capture stdout to verify
|
||||
return 0;
|
||||
}
|
||||
|
||||
int main() {
|
||||
int failed = 0;
|
||||
|
||||
printf("Running debug tests...\n");
|
||||
|
||||
if (test_debug_init() != 0) {
|
||||
printf("FAIL: test_debug_init\n");
|
||||
failed++;
|
||||
} else {
|
||||
printf("PASS: test_debug_init\n");
|
||||
}
|
||||
|
||||
if (test_debug_levels() != 0) {
|
||||
printf("FAIL: test_debug_levels\n");
|
||||
failed++;
|
||||
} else {
|
||||
printf("PASS: test_debug_levels\n");
|
||||
}
|
||||
|
||||
return failed;
|
||||
}
|
||||
```
|
||||
|
||||
## Phase 6: Documentation
|
||||
|
||||
### Step 6.1: Create README.md
|
||||
|
||||
Key sections:
|
||||
1. Overview and purpose
|
||||
2. Quick start guide
|
||||
3. Installation instructions
|
||||
4. Usage examples
|
||||
5. API reference (brief)
|
||||
6. Integration guide
|
||||
7. Contributing guidelines
|
||||
8. License
|
||||
|
||||
### Step 6.2: Create API Documentation
|
||||
|
||||
**File**: `docs/API.md`
|
||||
|
||||
Complete API reference with:
|
||||
- Function signatures
|
||||
- Parameter descriptions
|
||||
- Return values
|
||||
- Usage examples
|
||||
- Common patterns
|
||||
|
||||
### Step 6.3: Create Integration Guide
|
||||
|
||||
**File**: `docs/INTEGRATION.md`
|
||||
|
||||
How to integrate into projects:
|
||||
1. As git submodule
|
||||
2. Makefile integration
|
||||
3. Code examples
|
||||
4. Migration from standalone utilities
|
||||
|
||||
## Phase 7: Integration with c-relay
|
||||
|
||||
### Step 7.1: Add as Submodule
|
||||
|
||||
```bash
|
||||
cd /path/to/c-relay
|
||||
git submodule add <repo-url> c_utils_lib
|
||||
git submodule update --init --recursive
|
||||
```
|
||||
|
||||
### Step 7.2: Update c-relay Makefile
|
||||
|
||||
```makefile
|
||||
# Add to c-relay Makefile
|
||||
C_UTILS_LIB = c_utils_lib/libc_utils.a
|
||||
|
||||
# Update includes
|
||||
INCLUDES += -Ic_utils_lib/include
|
||||
|
||||
# Update libs
|
||||
LIBS += -Lc_utils_lib -lc_utils
|
||||
|
||||
# Add dependency
|
||||
$(C_UTILS_LIB):
|
||||
cd c_utils_lib && ./build.sh lib
|
||||
|
||||
# Update main target
|
||||
$(TARGET): $(C_UTILS_LIB) ...
|
||||
```
|
||||
|
||||
### Step 7.3: Update c-relay Source Files
|
||||
|
||||
**Changes needed**:
|
||||
|
||||
1. Update includes:
|
||||
```c
|
||||
// Old
|
||||
#include "debug.h"
|
||||
|
||||
// New
|
||||
#include <c_utils/debug.h>
|
||||
```
|
||||
|
||||
2. Remove old debug files:
|
||||
```bash
|
||||
git rm src/debug.c src/debug.h
|
||||
```
|
||||
|
||||
3. Update all files that use debug system:
|
||||
- `src/main.c`
|
||||
- `src/config.c`
|
||||
- `src/dm_admin.c`
|
||||
- `src/websockets.c`
|
||||
- `src/subscriptions.c`
|
||||
- Any other files using DEBUG_* macros
|
||||
|
||||
### Step 7.4: Test Integration
|
||||
|
||||
```bash
|
||||
cd c-relay
|
||||
make clean
|
||||
make
|
||||
./make_and_restart_relay.sh
|
||||
```
|
||||
|
||||
Verify:
|
||||
- Compilation succeeds
|
||||
- Debug output works correctly
|
||||
- No functionality regressions
|
||||
|
||||
## Phase 8: Version System Integration
|
||||
|
||||
### Step 8.1: Update c-relay Makefile for Versioning
|
||||
|
||||
```makefile
|
||||
# Add version generation
|
||||
src/version.h: .git/refs/tags/*
|
||||
c_utils_lib/bin/generate_version src/version.h C_RELAY
|
||||
|
||||
# Add dependency
|
||||
$(TARGET): src/version.h ...
|
||||
```
|
||||
|
||||
### Step 8.2: Update c-relay to Use Generated Version
|
||||
|
||||
Replace hardcoded version in `src/main.h` with:
|
||||
```c
|
||||
#include "version.h"
|
||||
// Use C_RELAY_VERSION instead of hardcoded VERSION
|
||||
```
|
||||
|
||||
## Timeline Estimate
|
||||
|
||||
- **Phase 1**: Repository Setup - 1 hour
|
||||
- **Phase 2**: Debug System - 2 hours
|
||||
- **Phase 3**: Version Utilities - 4 hours
|
||||
- **Phase 4**: Build System - 2 hours
|
||||
- **Phase 5**: Examples & Tests - 3 hours
|
||||
- **Phase 6**: Documentation - 3 hours
|
||||
- **Phase 7**: c-relay Integration - 2 hours
|
||||
- **Phase 8**: Version Integration - 2 hours
|
||||
|
||||
**Total**: ~19 hours
|
||||
|
||||
## Success Criteria
|
||||
|
||||
- [ ] c_utils_lib builds successfully
|
||||
- [ ] All tests pass
|
||||
- [ ] Examples compile and run
|
||||
- [ ] c-relay integrates successfully
|
||||
- [ ] Debug output works in c-relay
|
||||
- [ ] Version generation works
|
||||
- [ ] Documentation complete
|
||||
- [ ] No regressions in c-relay functionality
|
||||
|
||||
## Next Steps
|
||||
|
||||
1. Review this plan with stakeholders
|
||||
2. Create repository structure
|
||||
3. Implement debug system
|
||||
4. Implement version utilities
|
||||
5. Create build system
|
||||
6. Write tests and examples
|
||||
7. Create documentation
|
||||
8. Integrate into c-relay
|
||||
9. Test thoroughly
|
||||
10. Publish to GitHub
|
||||
|
||||
## Notes
|
||||
|
||||
- Keep the API simple and intuitive
|
||||
- Focus on zero external dependencies
|
||||
- Prioritize learning value in code comments
|
||||
- Make integration as easy as possible
|
||||
- Document everything thoroughly
|
||||
@@ -175,6 +175,18 @@ Configuration events follow the standard Nostr event format with kind 33334:
|
||||
- **Impact**: Allows some flexibility in expiration timing
|
||||
- **Example**: `"600"` (10 minute grace period)
|
||||
|
||||
### NIP-59 Gift Wrap Timestamp Configuration
|
||||
|
||||
#### `nip59_timestamp_max_delay_sec`
|
||||
- **Description**: Controls timestamp randomization for NIP-59 gift wraps
|
||||
- **Default**: `"0"` (no randomization)
|
||||
- **Range**: `0` to `604800` (7 days)
|
||||
- **Impact**: Affects compatibility with other Nostr clients for direct messaging
|
||||
- **Values**:
|
||||
- `"0"`: No randomization (maximum compatibility)
|
||||
- `"1-604800"`: Random timestamp between now and N seconds ago
|
||||
- **Example**: `"172800"` (2 days randomization for privacy)
|
||||
|
||||
## Configuration Examples
|
||||
|
||||
### Basic Relay Setup
|
||||
|
||||
@@ -1,358 +0,0 @@
|
||||
# Event-Based Configuration System Implementation Plan
|
||||
|
||||
## Overview
|
||||
|
||||
This document provides a detailed implementation plan for transitioning the C Nostr Relay from command line arguments and file-based configuration to a pure event-based configuration system using kind 33334 Nostr events stored directly in the database.
|
||||
|
||||
## Implementation Phases
|
||||
|
||||
### Phase 0: File Structure Preparation ✅ COMPLETED
|
||||
|
||||
#### 0.1 Backup and Prepare Files ✅ COMPLETED
|
||||
**Actions:**
|
||||
1. ✅ Rename `src/config.c` to `src/config.c.old` - DONE
|
||||
2. ✅ Rename `src/config.h` to `src/config.h.old` - DONE
|
||||
3. ✅ Create new empty `src/config.c` and `src/config.h` - DONE
|
||||
4. ✅ Create new `src/default_config_event.h` - DONE
|
||||
|
||||
### Phase 1: Database Schema and Core Infrastructure ✅ COMPLETED
|
||||
|
||||
#### 1.1 Update Database Naming System ✅ COMPLETED
|
||||
**File:** `src/main.c`, new `src/config.c`, new `src/config.h`
|
||||
|
||||
```c
|
||||
// New functions implemented: ✅
|
||||
char* get_database_name_from_relay_pubkey(const char* relay_pubkey);
|
||||
int create_database_with_relay_pubkey(const char* relay_pubkey);
|
||||
```
|
||||
|
||||
**Changes Completed:** ✅
|
||||
- ✅ Create completely new `src/config.c` and `src/config.h` files
|
||||
- ✅ Rename old files to `src/config.c.old` and `src/config.h.old`
|
||||
- ✅ Modify `init_database()` to use relay pubkey for database naming
|
||||
- ✅ Use `nostr_core_lib` functions for all keypair generation
|
||||
- ✅ Database path: `./<relay_pubkey>.nrdb`
|
||||
- ✅ Remove all database path command line argument handling
|
||||
|
||||
#### 1.2 Configuration Event Storage ✅ COMPLETED
|
||||
**File:** new `src/config.c`, new `src/default_config_event.h`
|
||||
|
||||
```c
|
||||
// Configuration functions implemented: ✅
|
||||
int store_config_event_in_database(const cJSON* event);
|
||||
cJSON* load_config_event_from_database(const char* relay_pubkey);
|
||||
```
|
||||
|
||||
**Changes Completed:** ✅
|
||||
- ✅ Create new `src/default_config_event.h` for default configuration values
|
||||
- ✅ Add functions to store/retrieve kind 33334 events from events table
|
||||
- ✅ Use `nostr_core_lib` functions for all event validation
|
||||
- ✅ Clean separation: default config values isolated in header file
|
||||
- ✅ Remove existing config table dependencies
|
||||
|
||||
### Phase 2: Event Processing Integration ✅ COMPLETED
|
||||
|
||||
#### 2.1 Real-time Configuration Processing ✅ COMPLETED
|
||||
**File:** `src/main.c` (event processing functions)
|
||||
|
||||
**Integration Points:** ✅ IMPLEMENTED
|
||||
```c
|
||||
// In existing event processing loop: ✅ IMPLEMENTED
|
||||
// Added kind 33334 event detection in main event loop
|
||||
if (kind_num == 33334) {
|
||||
if (handle_configuration_event(event, error_message, sizeof(error_message)) == 0) {
|
||||
// Configuration event processed successfully
|
||||
}
|
||||
}
|
||||
|
||||
// Configuration event processing implemented: ✅
|
||||
int process_configuration_event(const cJSON* event);
|
||||
int handle_configuration_event(cJSON* event, char* error_message, size_t error_size);
|
||||
```
|
||||
|
||||
#### 2.2 Configuration Application System ⚠️ PARTIALLY COMPLETED
|
||||
**File:** `src/config.c`
|
||||
|
||||
**Status:** Configuration access functions implemented, field handlers need completion
|
||||
```c
|
||||
// Configuration access implemented: ✅
|
||||
const char* get_config_value(const char* key);
|
||||
int get_config_int(const char* key, int default_value);
|
||||
int get_config_bool(const char* key, int default_value);
|
||||
|
||||
// Field handlers need implementation: ⏳ IN PROGRESS
|
||||
// Need to implement specific apply functions for runtime changes
|
||||
```
|
||||
|
||||
### Phase 3: First-Time Startup System ✅ COMPLETED
|
||||
|
||||
#### 3.1 Key Generation and Initial Setup ✅ COMPLETED
|
||||
**File:** new `src/config.c`, `src/default_config_event.h`
|
||||
|
||||
**Status:** ✅ FULLY IMPLEMENTED with secure /dev/urandom + nostr_core_lib validation
|
||||
|
||||
```c
|
||||
int first_time_startup_sequence() {
|
||||
// 1. Generate admin keypair using nostr_core_lib
|
||||
unsigned char admin_privkey_bytes[32];
|
||||
char admin_privkey[65], admin_pubkey[65];
|
||||
|
||||
if (nostr_generate_private_key(admin_privkey_bytes) != 0) {
|
||||
return -1;
|
||||
}
|
||||
nostr_bytes_to_hex(admin_privkey_bytes, 32, admin_privkey);
|
||||
|
||||
unsigned char admin_pubkey_bytes[32];
|
||||
if (nostr_ec_public_key_from_private_key(admin_privkey_bytes, admin_pubkey_bytes) != 0) {
|
||||
return -1;
|
||||
}
|
||||
nostr_bytes_to_hex(admin_pubkey_bytes, 32, admin_pubkey);
|
||||
|
||||
// 2. Generate relay keypair using nostr_core_lib
|
||||
unsigned char relay_privkey_bytes[32];
|
||||
char relay_privkey[65], relay_pubkey[65];
|
||||
|
||||
if (nostr_generate_private_key(relay_privkey_bytes) != 0) {
|
||||
return -1;
|
||||
}
|
||||
nostr_bytes_to_hex(relay_privkey_bytes, 32, relay_privkey);
|
||||
|
||||
unsigned char relay_pubkey_bytes[32];
|
||||
if (nostr_ec_public_key_from_private_key(relay_privkey_bytes, relay_pubkey_bytes) != 0) {
|
||||
return -1;
|
||||
}
|
||||
nostr_bytes_to_hex(relay_pubkey_bytes, 32, relay_pubkey);
|
||||
|
||||
// 3. Create database with relay pubkey name
|
||||
if (create_database_with_relay_pubkey(relay_pubkey) != 0) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
// 4. Create initial configuration event using defaults from header
|
||||
cJSON* config_event = create_default_config_event(admin_privkey_bytes, relay_privkey, relay_pubkey);
|
||||
|
||||
// 5. Store configuration event in database
|
||||
store_config_event_in_database(config_event);
|
||||
|
||||
// 6. Print admin private key for user to save
|
||||
printf("=== SAVE THIS ADMIN PRIVATE KEY ===\n");
|
||||
printf("Admin Private Key: %s\n", admin_privkey);
|
||||
printf("===================================\n");
|
||||
|
||||
return 0;
|
||||
}
|
||||
```
|
||||
|
||||
#### 3.2 Database Detection Logic ✅ COMPLETED
|
||||
**File:** `src/main.c`
|
||||
|
||||
**Status:** ✅ FULLY IMPLEMENTED
|
||||
```c
|
||||
// Implemented functions: ✅
|
||||
char** find_existing_nrdb_files(void);
|
||||
char* extract_pubkey_from_filename(const char* filename);
|
||||
int is_first_time_startup(void);
|
||||
int first_time_startup_sequence(void);
|
||||
int startup_existing_relay(const char* relay_pubkey);
|
||||
```
|
||||
|
||||
### Phase 4: Legacy System Removal ✅ PARTIALLY COMPLETED
|
||||
|
||||
#### 4.1 Remove Command Line Arguments ✅ COMPLETED
|
||||
**File:** `src/main.c`
|
||||
|
||||
**Status:** ✅ COMPLETED
|
||||
- ✅ All argument parsing logic removed except --help and --version
|
||||
- ✅ `--port`, `--config-dir`, `--config-file`, `--database-path` handling removed
|
||||
- ✅ Environment variable override systems removed
|
||||
- ✅ Clean help and version functions implemented
|
||||
|
||||
#### 4.2 Remove Configuration File System ✅ COMPLETED
|
||||
**File:** `src/config.c`
|
||||
|
||||
**Status:** ✅ COMPLETED - New file created from scratch
|
||||
- ✅ All legacy file-based configuration functions removed
|
||||
- ✅ XDG configuration directory logic removed
|
||||
- ✅ Pure event-based system implemented
|
||||
|
||||
#### 4.3 Remove Legacy Database Tables ⏳ PENDING
|
||||
**File:** `src/sql_schema.h`
|
||||
|
||||
**Status:** ⏳ NEEDS COMPLETION
|
||||
```sql
|
||||
-- Still need to remove these tables:
|
||||
DROP TABLE IF EXISTS config;
|
||||
DROP TABLE IF EXISTS config_history;
|
||||
DROP TABLE IF EXISTS config_file_cache;
|
||||
DROP VIEW IF EXISTS active_config;
|
||||
```
|
||||
|
||||
### Phase 5: Configuration Management
|
||||
|
||||
#### 5.1 Configuration Field Mapping
|
||||
**File:** `src/config.c`
|
||||
|
||||
```c
|
||||
// Map configuration tags to current system
|
||||
static const config_field_handler_t config_handlers[] = {
|
||||
{"auth_enabled", 0, apply_auth_enabled},
|
||||
{"relay_port", 1, apply_relay_port}, // requires restart
|
||||
{"max_connections", 0, apply_max_connections},
|
||||
{"relay_description", 0, apply_relay_description},
|
||||
{"relay_contact", 0, apply_relay_contact},
|
||||
{"relay_pubkey", 1, apply_relay_pubkey}, // requires restart
|
||||
{"relay_privkey", 1, apply_relay_privkey}, // requires restart
|
||||
{"pow_min_difficulty", 0, apply_pow_difficulty},
|
||||
{"nip40_expiration_enabled", 0, apply_expiration_enabled},
|
||||
{"max_subscriptions_per_client", 0, apply_max_subscriptions},
|
||||
{"max_event_tags", 0, apply_max_event_tags},
|
||||
{"max_content_length", 0, apply_max_content_length},
|
||||
{"default_limit", 0, apply_default_limit},
|
||||
{"max_limit", 0, apply_max_limit},
|
||||
// ... etc
|
||||
};
|
||||
```
|
||||
|
||||
#### 5.2 Startup Configuration Loading
|
||||
**File:** `src/main.c`
|
||||
|
||||
```c
|
||||
int startup_existing_relay(const char* relay_pubkey) {
|
||||
// 1. Open database
|
||||
if (init_database_with_pubkey(relay_pubkey) != 0) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
// 2. Load configuration event from database
|
||||
cJSON* config_event = load_config_event_from_database(relay_pubkey);
|
||||
if (!config_event) {
|
||||
log_error("No configuration event found in database");
|
||||
return -1;
|
||||
}
|
||||
|
||||
// 3. Apply all configuration from event
|
||||
if (apply_configuration_from_event(config_event) != 0) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
// 4. Continue with normal startup
|
||||
return start_relay_services();
|
||||
}
|
||||
```
|
||||
|
||||
## Implementation Order - PROGRESS STATUS
|
||||
|
||||
### Step 1: Core Infrastructure ✅ COMPLETED
|
||||
1. ✅ Implement database naming with relay pubkey
|
||||
2. ✅ Add key generation functions using `nostr_core_lib`
|
||||
3. ✅ Create configuration event storage/retrieval functions
|
||||
4. ✅ Test basic event creation and storage
|
||||
|
||||
### Step 2: Event Processing Integration ✅ MOSTLY COMPLETED
|
||||
1. ✅ Add kind 33334 event detection to event processing loop
|
||||
2. ✅ Implement configuration event validation
|
||||
3. ⚠️ Create configuration application handlers (basic access implemented, runtime handlers pending)
|
||||
4. ⏳ Test real-time configuration updates (infrastructure ready)
|
||||
|
||||
### Step 3: First-Time Startup ✅ COMPLETED
|
||||
1. ✅ Implement first-time startup detection
|
||||
2. ✅ Add automatic key generation and database creation
|
||||
3. ✅ Create default configuration event generation
|
||||
4. ✅ Test complete first-time startup flow
|
||||
|
||||
### Step 4: Legacy Removal ⚠️ MOSTLY COMPLETED
|
||||
1. ✅ Remove command line argument parsing
|
||||
2. ✅ Remove configuration file system
|
||||
3. ⏳ Remove legacy database tables (pending)
|
||||
4. ✅ Update all references to use event-based config
|
||||
|
||||
### Step 5: Testing and Validation ⚠️ PARTIALLY COMPLETED
|
||||
1. ✅ Test complete startup flow (first time and existing)
|
||||
2. ⏳ Test configuration updates via events (infrastructure ready)
|
||||
3. ⚠️ Test error handling and recovery (basic error handling implemented)
|
||||
4. ⏳ Performance testing and optimization (pending)
|
||||
|
||||
## Migration Strategy
|
||||
|
||||
### For Existing Installations
|
||||
Since the new system uses a completely different approach:
|
||||
|
||||
1. **No Automatic Migration**: The new system starts fresh
|
||||
2. **Manual Migration**: Users can manually copy configuration values
|
||||
3. **Documentation**: Provide clear migration instructions
|
||||
4. **Coexistence**: Old and new systems use different database names
|
||||
|
||||
### Migration Steps for Users
|
||||
1. Stop existing relay
|
||||
2. Note current configuration values
|
||||
3. Start new relay (generates keys and new database)
|
||||
4. Create kind 33334 event with desired configuration using admin private key
|
||||
5. Send event to relay to update configuration
|
||||
|
||||
## Testing Requirements
|
||||
|
||||
### Unit Tests
|
||||
- Key generation functions
|
||||
- Configuration event creation and validation
|
||||
- Database naming logic
|
||||
- Configuration application handlers
|
||||
|
||||
### Integration Tests
|
||||
- Complete first-time startup flow
|
||||
- Configuration update via events
|
||||
- Error handling scenarios
|
||||
- Database operations
|
||||
|
||||
### Performance Tests
|
||||
- Startup time comparison
|
||||
- Configuration update response time
|
||||
- Memory usage analysis
|
||||
|
||||
## Security Considerations
|
||||
|
||||
1. **Admin Private Key**: Never stored, only printed once
|
||||
2. **Event Validation**: All configuration events must be signed by admin
|
||||
3. **Database Security**: Relay database contains relay private key
|
||||
4. **Key Generation**: Use `nostr_core_lib` for cryptographically secure generation
|
||||
|
||||
## Files to Modify
|
||||
|
||||
### Major Changes
|
||||
- `src/main.c` - Startup logic, event processing, argument removal
|
||||
- `src/config.c` - Complete rewrite for event-based configuration
|
||||
- `src/config.h` - Update function signatures and structures
|
||||
- `src/sql_schema.h` - Remove config tables
|
||||
|
||||
### Minor Changes
|
||||
- `Makefile` - Remove any config file generation
|
||||
- `systemd/` - Update service files if needed
|
||||
- Documentation updates
|
||||
|
||||
## Backwards Compatibility
|
||||
|
||||
**Breaking Changes:**
|
||||
- Command line arguments removed (except --help, --version)
|
||||
- Configuration files no longer used
|
||||
- Database naming scheme changed
|
||||
- Configuration table removed
|
||||
|
||||
**Migration Required:** This is a breaking change that requires manual migration for existing installations.
|
||||
|
||||
## Success Criteria - CURRENT STATUS
|
||||
|
||||
1. ✅ **Zero Command Line Arguments**: Relay starts with just `./c-relay`
|
||||
2. ✅ **Automatic First-Time Setup**: Generates keys and database automatically
|
||||
3. ⚠️ **Real-Time Configuration**: Infrastructure ready, handlers need completion
|
||||
4. ✅ **Single Database File**: All configuration and data in one `.nrdb` file
|
||||
5. ⚠️ **Admin Control**: Event processing implemented, signature validation ready
|
||||
6. ⚠️ **Clean Codebase**: Most legacy code removed, database tables cleanup pending
|
||||
|
||||
## Risk Mitigation
|
||||
|
||||
1. **Backup Strategy**: Document manual backup procedures for relay database
|
||||
2. **Key Loss Recovery**: Document recovery procedures if admin key is lost
|
||||
3. **Testing Coverage**: Comprehensive test suite before deployment
|
||||
4. **Rollback Plan**: Keep old version available during transition period
|
||||
5. **Documentation**: Comprehensive user and developer documentation
|
||||
|
||||
This implementation plan provides a clear path from the current system to the new event-based configuration architecture while maintaining security and reliability.
|
||||
298
docs/libwebsockets_proper_pattern.md
Normal file
@@ -0,0 +1,298 @@
|
||||
# Libwebsockets Proper Pattern - Message Queue Design
|
||||
|
||||
## Problem Analysis
|
||||
|
||||
### Current Violation
|
||||
We're calling `lws_write()` directly from multiple code paths:
|
||||
1. **Event broadcast** (subscriptions.c:667) - when events arrive
|
||||
2. **OK responses** (websockets.c:855) - when processing EVENT messages
|
||||
3. **EOSE responses** (websockets.c:976) - when processing REQ messages
|
||||
4. **COUNT responses** (websockets.c:1922) - when processing COUNT messages
|
||||
|
||||
This violates libwebsockets' design pattern which requires:
|
||||
- **`lws_write()` ONLY called from `LWS_CALLBACK_SERVER_WRITEABLE`**
|
||||
- Application queues messages and requests writeable callback
|
||||
- Libwebsockets handles write timing and socket buffer management
|
||||
|
||||
### Consequences of Violation
|
||||
1. Partial writes when socket buffer is full
|
||||
2. Multiple concurrent write attempts before callback fires
|
||||
3. "write already pending" errors with single buffer
|
||||
4. Frame corruption from interleaved partial writes
|
||||
5. "Invalid frame header" errors on client side
|
||||
|
||||
## Correct Architecture
|
||||
|
||||
### Message Queue Pattern
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────────────────┐
|
||||
│ Application Layer │
|
||||
├─────────────────────────────────────────────────────────────┤
|
||||
│ │
|
||||
│ Event Arrives → Queue Message → Request Writeable Callback │
|
||||
│ REQ Received → Queue EOSE → Request Writeable Callback │
|
||||
│ EVENT Received→ Queue OK → Request Writeable Callback │
|
||||
│ COUNT Received→ Queue COUNT → Request Writeable Callback │
|
||||
│ │
|
||||
└─────────────────────────────────────────────────────────────┘
|
||||
↓
|
||||
lws_callback_on_writable(wsi)
|
||||
↓
|
||||
┌─────────────────────────────────────────────────────────────┐
|
||||
│ LWS_CALLBACK_SERVER_WRITEABLE │
|
||||
├─────────────────────────────────────────────────────────────┤
|
||||
│ │
|
||||
│ 1. Dequeue next message from queue │
|
||||
│ 2. Call lws_write() with message data │
|
||||
│ 3. If queue not empty, request another callback │
|
||||
│ │
|
||||
└─────────────────────────────────────────────────────────────┘
|
||||
↓
|
||||
libwebsockets handles:
|
||||
- Socket buffer management
|
||||
- Partial write handling
|
||||
- Frame atomicity
|
||||
```
|
||||
|
||||
## Data Structures
|
||||
|
||||
### Message Queue Node
|
||||
```c
|
||||
typedef struct message_queue_node {
|
||||
unsigned char* data; // Message data (with LWS_PRE space)
|
||||
size_t length; // Message length (without LWS_PRE)
|
||||
enum lws_write_protocol type; // LWS_WRITE_TEXT, etc.
|
||||
struct message_queue_node* next;
|
||||
} message_queue_node_t;
|
||||
```
|
||||
|
||||
### Per-Session Data Updates
|
||||
```c
|
||||
struct per_session_data {
|
||||
// ... existing fields ...
|
||||
|
||||
// Message queue (replaces single buffer)
|
||||
message_queue_node_t* message_queue_head;
|
||||
message_queue_node_t* message_queue_tail;
|
||||
int message_queue_count;
|
||||
int writeable_requested; // Flag to prevent duplicate requests
|
||||
};
|
||||
```
|
||||
|
||||
## Implementation Functions
|
||||
|
||||
### 1. Queue Message (Application Layer)
|
||||
```c
|
||||
int queue_message(struct lws* wsi, struct per_session_data* pss,
|
||||
const char* message, size_t length,
|
||||
enum lws_write_protocol type)
|
||||
{
|
||||
// Allocate node
|
||||
message_queue_node_t* node = malloc(sizeof(message_queue_node_t));
|
||||
|
||||
// Allocate buffer with LWS_PRE space
|
||||
node->data = malloc(LWS_PRE + length);
|
||||
memcpy(node->data + LWS_PRE, message, length);
|
||||
node->length = length;
|
||||
node->type = type;
|
||||
node->next = NULL;
|
||||
|
||||
// Add to queue (FIFO)
|
||||
pthread_mutex_lock(&pss->session_lock);
|
||||
if (!pss->message_queue_head) {
|
||||
pss->message_queue_head = node;
|
||||
pss->message_queue_tail = node;
|
||||
} else {
|
||||
pss->message_queue_tail->next = node;
|
||||
pss->message_queue_tail = node;
|
||||
}
|
||||
pss->message_queue_count++;
|
||||
pthread_mutex_unlock(&pss->session_lock);
|
||||
|
||||
// Request writeable callback (only if not already requested)
|
||||
if (!pss->writeable_requested) {
|
||||
pss->writeable_requested = 1;
|
||||
lws_callback_on_writable(wsi);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
```
|
||||
|
||||
### 2. Process Queue (Writeable Callback)
|
||||
```c
|
||||
int process_message_queue(struct lws* wsi, struct per_session_data* pss)
|
||||
{
|
||||
pthread_mutex_lock(&pss->session_lock);
|
||||
|
||||
// Get next message from queue
|
||||
message_queue_node_t* node = pss->message_queue_head;
|
||||
if (!node) {
|
||||
pss->writeable_requested = 0;
|
||||
pthread_mutex_unlock(&pss->session_lock);
|
||||
return 0; // Queue empty
|
||||
}
|
||||
|
||||
// Remove from queue
|
||||
pss->message_queue_head = node->next;
|
||||
if (!pss->message_queue_head) {
|
||||
pss->message_queue_tail = NULL;
|
||||
}
|
||||
pss->message_queue_count--;
|
||||
|
||||
pthread_mutex_unlock(&pss->session_lock);
|
||||
|
||||
// Write message (libwebsockets handles partial writes)
|
||||
int result = lws_write(wsi, node->data + LWS_PRE, node->length, node->type);
|
||||
|
||||
// Free node
|
||||
free(node->data);
|
||||
free(node);
|
||||
|
||||
// If queue not empty, request another callback
|
||||
pthread_mutex_lock(&pss->session_lock);
|
||||
if (pss->message_queue_head) {
|
||||
lws_callback_on_writable(wsi);
|
||||
} else {
|
||||
pss->writeable_requested = 0;
|
||||
}
|
||||
pthread_mutex_unlock(&pss->session_lock);
|
||||
|
||||
return (result < 0) ? -1 : 0;
|
||||
}
|
||||
```
|
||||
|
||||
## Refactoring Changes
|
||||
|
||||
### Before (WRONG - Direct Write)
|
||||
```c
|
||||
// websockets.c:855 - OK response
|
||||
int write_result = lws_write(wsi, buf + LWS_PRE, response_len, LWS_WRITE_TEXT);
|
||||
if (write_result < 0) {
|
||||
DEBUG_ERROR("Write failed");
|
||||
} else if ((size_t)write_result != response_len) {
|
||||
// Partial write - queue remaining data
|
||||
queue_websocket_write(wsi, pss, ...);
|
||||
}
|
||||
```
|
||||
|
||||
### After (CORRECT - Queue Message)
|
||||
```c
|
||||
// websockets.c:855 - OK response
|
||||
queue_message(wsi, pss, response_str, response_len, LWS_WRITE_TEXT);
|
||||
// That's it! Writeable callback will handle the actual write
|
||||
```
|
||||
|
||||
### Before (WRONG - Direct Write in Broadcast)
|
||||
```c
|
||||
// subscriptions.c:667 - EVENT broadcast
|
||||
int write_result = lws_write(current_temp->wsi, buf + LWS_PRE, msg_len, LWS_WRITE_TEXT);
|
||||
if (write_result < 0) {
|
||||
DEBUG_ERROR("Write failed");
|
||||
} else if ((size_t)write_result != msg_len) {
|
||||
queue_websocket_write(...);
|
||||
}
|
||||
```
|
||||
|
||||
### After (CORRECT - Queue Message)
|
||||
```c
|
||||
// subscriptions.c:667 - EVENT broadcast
|
||||
struct per_session_data* pss = lws_wsi_user(current_temp->wsi);
|
||||
queue_message(current_temp->wsi, pss, msg_str, msg_len, LWS_WRITE_TEXT);
|
||||
// Writeable callback will handle the actual write
|
||||
```
|
||||
|
||||
## Benefits of Correct Pattern
|
||||
|
||||
1. **No Partial Write Handling Needed**
|
||||
- Libwebsockets handles partial writes internally
|
||||
- We just queue complete messages
|
||||
|
||||
2. **No "Write Already Pending" Errors**
|
||||
- Queue can hold unlimited messages
|
||||
- Each processed sequentially from callback
|
||||
|
||||
3. **Thread Safety**
|
||||
- Queue operations protected by session lock
|
||||
- Write only from single callback thread
|
||||
|
||||
4. **Frame Atomicity**
|
||||
- Libwebsockets ensures complete frame transmission
|
||||
- No interleaved partial writes
|
||||
|
||||
5. **Simpler Code**
|
||||
- No complex partial write state machine
|
||||
- Just queue and forget
|
||||
|
||||
6. **Better Performance**
|
||||
- Libwebsockets optimizes write timing
|
||||
- Batches writes when socket ready
|
||||
|
||||
## Migration Steps
|
||||
|
||||
1. ✅ Identify all `lws_write()` call sites
|
||||
2. ✅ Confirm violation of libwebsockets pattern
|
||||
3. ⏳ Design message queue structure
|
||||
4. ⏳ Implement `queue_message()` function
|
||||
5. ⏳ Implement `process_message_queue()` function
|
||||
6. ⏳ Update `per_session_data` structure
|
||||
7. ⏳ Refactor OK response to use queue
|
||||
8. ⏳ Refactor EOSE response to use queue
|
||||
9. ⏳ Refactor COUNT response to use queue
|
||||
10. ⏳ Refactor EVENT broadcast to use queue
|
||||
11. ⏳ Update `LWS_CALLBACK_SERVER_WRITEABLE` handler
|
||||
12. ⏳ Add queue cleanup in `LWS_CALLBACK_CLOSED`
|
||||
13. ⏳ Remove old partial write code
|
||||
14. ⏳ Test with rapid multiple events
|
||||
15. ⏳ Test with large events (>4KB)
|
||||
16. ⏳ Test under load
|
||||
17. ⏳ Verify no frame errors
|
||||
|
||||
## Testing Strategy
|
||||
|
||||
### Test 1: Multiple Rapid Events
|
||||
```bash
|
||||
# Send 10 events rapidly to same client
|
||||
for i in {1..10}; do
|
||||
echo '["EVENT",{"kind":1,"content":"test'$i'","created_at":'$(date +%s)',...}]' | \
|
||||
websocat ws://localhost:8888 &
|
||||
done
|
||||
```
|
||||
|
||||
**Expected**: All events queued and sent sequentially, no errors
|
||||
|
||||
### Test 2: Large Events
|
||||
```bash
|
||||
# Send event >4KB (forces multiple socket writes)
|
||||
nak event --content "$(head -c 5000 /dev/urandom | base64)" | \
|
||||
websocat ws://localhost:8888
|
||||
```
|
||||
|
||||
**Expected**: Event queued, libwebsockets handles partial writes internally
|
||||
|
||||
### Test 3: Concurrent Connections
|
||||
```bash
|
||||
# 100 concurrent connections, each sending events
|
||||
for i in {1..100}; do
|
||||
(echo '["REQ","sub'$i'",{}]'; sleep 1) | websocat ws://localhost:8888 &
|
||||
done
|
||||
```
|
||||
|
||||
**Expected**: All subscriptions work, events broadcast correctly
|
||||
|
||||
## Success Criteria
|
||||
|
||||
- ✅ No `lws_write()` calls outside `LWS_CALLBACK_SERVER_WRITEABLE`
|
||||
- ✅ No "write already pending" errors in logs
|
||||
- ✅ No "Invalid frame header" errors on client side
|
||||
- ✅ All messages delivered in correct order
|
||||
- ✅ Large events (>4KB) handled correctly
|
||||
- ✅ Multiple rapid events to same client work
|
||||
- ✅ Concurrent connections stable under load
|
||||
|
||||
## References
|
||||
|
||||
- [libwebsockets documentation](https://libwebsockets.org/lws-api-doc-main/html/index.html)
- [LWS_CALLBACK_SERVER_WRITEABLE](https://libwebsockets.org/lws-api-doc-main/html/group__callback-when-writeable.html)
- [lws_callback_on_writable()](https://libwebsockets.org/lws-api-doc-main/html/group__callback-when-writeable.html) (see the "callback-when-writeable" group; the previous anchor fragment was invalid)
|
||||
601
docs/monitoring_simplified_plan.md
Normal file
@@ -0,0 +1,601 @@
|
||||
# Simplified Monitoring Implementation Plan

## Kind 34567 Event Kind Distribution Reporting

**Date:** 2025-10-16
**Status:** Implementation Ready
|
||||
|
||||
---
|
||||
|
||||
## Overview
|
||||
|
||||
Simplified real-time monitoring system that:
|
||||
- Reports event kind distribution (which includes total event count)
|
||||
- Uses kind 34567 addressable events with `d=event_kinds`
|
||||
- Controlled by two config variables
|
||||
- Enabled on-demand when admin logs in
|
||||
- Uses simple throttling to prevent performance impact
|
||||
|
||||
---
|
||||
|
||||
## Configuration Variables
|
||||
|
||||
### Database Config Table
|
||||
|
||||
Add two new configuration keys:
|
||||
|
||||
```sql
|
||||
INSERT INTO config (key, value, data_type, description, category) VALUES
|
||||
('kind_34567_reporting_enabled', 'false', 'boolean',
|
||||
'Enable/disable kind 34567 event kind distribution reporting', 'monitoring'),
|
||||
('kind_34567_reporting_throttling_sec', '5', 'integer',
|
||||
'Minimum seconds between kind 34567 reports (throttling)', 'monitoring');
|
||||
```
|
||||
|
||||
### Configuration Access
|
||||
|
||||
```c
|
||||
// In src/monitoring.c or src/api.c
|
||||
int is_monitoring_enabled(void) {
|
||||
return get_config_bool("kind_34567_reporting_enabled", 0);
|
||||
}
|
||||
|
||||
int get_monitoring_throttle_seconds(void) {
|
||||
return get_config_int("kind_34567_reporting_throttling_sec", 5);
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Event Structure
|
||||
|
||||
### Kind 34567 Event Format
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "<event_id>",
|
||||
"pubkey": "<relay_pubkey>",
|
||||
"created_at": 1697123456,
|
||||
"kind": 34567,
|
||||
"content": "{\"data_type\":\"event_kinds\",\"timestamp\":1697123456,\"data\":{\"total_events\":125000,\"distribution\":[{\"kind\":1,\"count\":45000,\"percentage\":36.0},{\"kind\":3,\"count\":12500,\"percentage\":10.0}]}}",
|
||||
"tags": [
|
||||
["d", "event_kinds"],
|
||||
["relay", "<relay_pubkey>"]
|
||||
],
|
||||
"sig": "<signature>"
|
||||
}
|
||||
```
|
||||
|
||||
### Content JSON Structure
|
||||
|
||||
```json
|
||||
{
|
||||
"data_type": "event_kinds",
|
||||
"timestamp": 1697123456,
|
||||
"data": {
|
||||
"total_events": 125000,
|
||||
"distribution": [
|
||||
{
|
||||
"kind": 1,
|
||||
"count": 45000,
|
||||
"percentage": 36.0
|
||||
},
|
||||
{
|
||||
"kind": 3,
|
||||
"count": 12500,
|
||||
"percentage": 10.0
|
||||
}
|
||||
]
|
||||
},
|
||||
"metadata": {
|
||||
"query_time_ms": 18
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Implementation
|
||||
|
||||
### File Structure
|
||||
|
||||
```
|
||||
src/
|
||||
monitoring.h # New file - monitoring system header
|
||||
monitoring.c # New file - monitoring implementation
|
||||
main.c # Modified - add trigger hook
|
||||
config.c # Modified - add config keys (or use migration)
|
||||
```
|
||||
|
||||
### 1. Header File: `src/monitoring.h`
|
||||
|
||||
```c
|
||||
#ifndef MONITORING_H
|
||||
#define MONITORING_H
|
||||
|
||||
#include <time.h>
|
||||
#include <cjson/cJSON.h>
|
||||
|
||||
// Initialize monitoring system
|
||||
int init_monitoring_system(void);
|
||||
|
||||
// Cleanup monitoring system
|
||||
void cleanup_monitoring_system(void);
|
||||
|
||||
// Called when an event is stored (from main.c)
|
||||
void monitoring_on_event_stored(void);
|
||||
|
||||
// Enable/disable monitoring (called from admin API)
|
||||
int set_monitoring_enabled(int enabled);
|
||||
|
||||
// Get monitoring status
|
||||
int is_monitoring_enabled(void);
|
||||
|
||||
// Get throttle interval
|
||||
int get_monitoring_throttle_seconds(void);
|
||||
|
||||
#endif /* MONITORING_H */
|
||||
```
|
||||
|
||||
### 2. Implementation: `src/monitoring.c`
|
||||
|
||||
```c
|
||||
#include "monitoring.h"
|
||||
#include "config.h"
|
||||
#include "debug.h"
|
||||
#include "../nostr_core_lib/nostr_core/nostr_core.h"
|
||||
#include <sqlite3.h>
|
||||
#include <string.h>
|
||||
#include <time.h>
|
||||
|
||||
// External references
|
||||
extern sqlite3* g_db;
|
||||
extern int broadcast_event_to_subscriptions(cJSON* event);
|
||||
extern int store_event(cJSON* event);
|
||||
extern const char* get_config_value(const char* key);
|
||||
extern int get_config_bool(const char* key, int default_value);
|
||||
extern int get_config_int(const char* key, int default_value);
|
||||
extern char* get_relay_private_key(void);
|
||||
|
||||
// Throttling state
|
||||
static time_t last_report_time = 0;
|
||||
|
||||
// Initialize monitoring system
|
||||
int init_monitoring_system(void) {
|
||||
DEBUG_LOG("Monitoring system initialized");
|
||||
last_report_time = 0;
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Cleanup monitoring system
|
||||
void cleanup_monitoring_system(void) {
|
||||
DEBUG_LOG("Monitoring system cleaned up");
|
||||
}
|
||||
|
||||
// Check if monitoring is enabled
|
||||
int is_monitoring_enabled(void) {
|
||||
return get_config_bool("kind_34567_reporting_enabled", 0);
|
||||
}
|
||||
|
||||
// Get throttle interval
|
||||
int get_monitoring_throttle_seconds(void) {
|
||||
return get_config_int("kind_34567_reporting_throttling_sec", 5);
|
||||
}
|
||||
|
||||
// Enable/disable monitoring
|
||||
int set_monitoring_enabled(int enabled) {
|
||||
// Update config table
|
||||
const char* value = enabled ? "true" : "false";
|
||||
|
||||
// This would call update_config_in_table() or similar
|
||||
// For now, assume we have a function to update config
|
||||
extern int update_config_in_table(const char* key, const char* value);
|
||||
return update_config_in_table("kind_34567_reporting_enabled", value);
|
||||
}
|
||||
|
||||
// Query event kind distribution from database
|
||||
static char* query_event_kind_distribution(void) {
|
||||
if (!g_db) {
|
||||
DEBUG_ERROR("Database not available for monitoring query");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
struct timespec start_time;
|
||||
clock_gettime(CLOCK_MONOTONIC, &start_time);
|
||||
|
||||
// Query total events
|
||||
sqlite3_stmt* stmt;
|
||||
int total_events = 0;
|
||||
|
||||
if (sqlite3_prepare_v2(g_db, "SELECT COUNT(*) FROM events", -1, &stmt, NULL) == SQLITE_OK) {
|
||||
if (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||
total_events = sqlite3_column_int(stmt, 0);
|
||||
}
|
||||
sqlite3_finalize(stmt);
|
||||
}
|
||||
|
||||
// Query kind distribution
|
||||
cJSON* response = cJSON_CreateObject();
|
||||
cJSON_AddStringToObject(response, "data_type", "event_kinds");
|
||||
cJSON_AddNumberToObject(response, "timestamp", (double)time(NULL));
|
||||
|
||||
cJSON* data = cJSON_CreateObject();
|
||||
cJSON_AddNumberToObject(data, "total_events", total_events);
|
||||
|
||||
cJSON* distribution = cJSON_CreateArray();
|
||||
|
||||
const char* sql =
|
||||
"SELECT kind, COUNT(*) as count, "
|
||||
"ROUND(COUNT(*) * 100.0 / (SELECT COUNT(*) FROM events), 2) as percentage "
|
||||
"FROM events GROUP BY kind ORDER BY count DESC";
|
||||
|
||||
if (sqlite3_prepare_v2(g_db, sql, -1, &stmt, NULL) == SQLITE_OK) {
|
||||
while (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||
cJSON* kind_obj = cJSON_CreateObject();
|
||||
cJSON_AddNumberToObject(kind_obj, "kind", sqlite3_column_int(stmt, 0));
|
||||
cJSON_AddNumberToObject(kind_obj, "count", sqlite3_column_int64(stmt, 1));
|
||||
cJSON_AddNumberToObject(kind_obj, "percentage", sqlite3_column_double(stmt, 2));
|
||||
cJSON_AddItemToArray(distribution, kind_obj);
|
||||
}
|
||||
sqlite3_finalize(stmt);
|
||||
}
|
||||
|
||||
cJSON_AddItemToObject(data, "distribution", distribution);
|
||||
cJSON_AddItemToObject(response, "data", data);
|
||||
|
||||
// Calculate query time
|
||||
struct timespec end_time;
|
||||
clock_gettime(CLOCK_MONOTONIC, &end_time);
|
||||
double query_time_ms = (end_time.tv_sec - start_time.tv_sec) * 1000.0 +
|
||||
(end_time.tv_nsec - start_time.tv_nsec) / 1000000.0;
|
||||
|
||||
cJSON* metadata = cJSON_CreateObject();
|
||||
cJSON_AddNumberToObject(metadata, "query_time_ms", query_time_ms);
|
||||
cJSON_AddItemToObject(response, "metadata", metadata);
|
||||
|
||||
char* json_string = cJSON_Print(response);
|
||||
cJSON_Delete(response);
|
||||
|
||||
return json_string;
|
||||
}
|
||||
|
||||
// Generate and broadcast kind 34567 event
|
||||
static int generate_monitoring_event(const char* json_content) {
|
||||
if (!json_content) return -1;
|
||||
|
||||
// Get relay keys
|
||||
const char* relay_pubkey = get_config_value("relay_pubkey");
|
||||
char* relay_privkey_hex = get_relay_private_key();
|
||||
if (!relay_pubkey || !relay_privkey_hex) {
|
||||
if (relay_privkey_hex) free(relay_privkey_hex);
|
||||
DEBUG_ERROR("Could not get relay keys for monitoring event");
|
||||
return -1;
|
||||
}
|
||||
|
||||
// Convert relay private key to bytes
|
||||
unsigned char relay_privkey[32];
|
||||
if (nostr_hex_to_bytes(relay_privkey_hex, relay_privkey, sizeof(relay_privkey)) != 0) {
|
||||
free(relay_privkey_hex);
|
||||
DEBUG_ERROR("Failed to convert relay private key");
|
||||
return -1;
|
||||
}
|
||||
free(relay_privkey_hex);
|
||||
|
||||
// Create tags array
|
||||
cJSON* tags = cJSON_CreateArray();
|
||||
|
||||
// d tag for addressable event
|
||||
cJSON* d_tag = cJSON_CreateArray();
|
||||
cJSON_AddItemToArray(d_tag, cJSON_CreateString("d"));
|
||||
cJSON_AddItemToArray(d_tag, cJSON_CreateString("event_kinds"));
|
||||
cJSON_AddItemToArray(tags, d_tag);
|
||||
|
||||
// relay tag
|
||||
cJSON* relay_tag = cJSON_CreateArray();
|
||||
cJSON_AddItemToArray(relay_tag, cJSON_CreateString("relay"));
|
||||
cJSON_AddItemToArray(relay_tag, cJSON_CreateString(relay_pubkey));
|
||||
cJSON_AddItemToArray(tags, relay_tag);
|
||||
|
||||
// Create and sign event
|
||||
cJSON* event = nostr_create_and_sign_event(
|
||||
34567, // kind
|
||||
json_content, // content
|
||||
tags, // tags
|
||||
relay_privkey, // private key
|
||||
time(NULL) // timestamp
|
||||
);
|
||||
|
||||
if (!event) {
|
||||
DEBUG_ERROR("Failed to create and sign monitoring event");
|
||||
return -1;
|
||||
}
|
||||
|
||||
// Broadcast to subscriptions
|
||||
broadcast_event_to_subscriptions(event);
|
||||
|
||||
// Store in database
|
||||
int result = store_event(event);
|
||||
|
||||
cJSON_Delete(event);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Called when an event is stored
|
||||
void monitoring_on_event_stored(void) {
|
||||
// Check if monitoring is enabled
|
||||
if (!is_monitoring_enabled()) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Check throttling
|
||||
time_t now = time(NULL);
|
||||
int throttle_seconds = get_monitoring_throttle_seconds();
|
||||
|
||||
if (now - last_report_time < throttle_seconds) {
|
||||
return; // Too soon, skip this update
|
||||
}
|
||||
|
||||
// Query event kind distribution
|
||||
char* json_content = query_event_kind_distribution();
|
||||
if (!json_content) {
|
||||
DEBUG_ERROR("Failed to query event kind distribution");
|
||||
return;
|
||||
}
|
||||
|
||||
// Generate and broadcast monitoring event
|
||||
int result = generate_monitoring_event(json_content);
|
||||
free(json_content);
|
||||
|
||||
if (result == 0) {
|
||||
last_report_time = now;
|
||||
DEBUG_LOG("Generated kind 34567 monitoring event");
|
||||
} else {
|
||||
DEBUG_ERROR("Failed to generate monitoring event");
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 3. Integration: Modify `src/main.c`
|
||||
|
||||
Add monitoring hook to event storage:
|
||||
|
||||
```c
|
||||
// At top of file
|
||||
#include "monitoring.h"
|
||||
|
||||
// In main() function, after init_database()
|
||||
if (init_monitoring_system() != 0) {
|
||||
DEBUG_WARN("Failed to initialize monitoring system");
|
||||
// Continue anyway - monitoring is optional
|
||||
}
|
||||
|
||||
// In store_event() function, after successful storage
|
||||
int store_event(cJSON* event) {
|
||||
// ... existing code ...
|
||||
|
||||
if (rc != SQLITE_DONE) {
|
||||
// ... error handling ...
|
||||
}
|
||||
|
||||
free(tags_json);
|
||||
|
||||
// Trigger monitoring update
|
||||
monitoring_on_event_stored();
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
// In cleanup section of main()
|
||||
cleanup_monitoring_system();
|
||||
```
|
||||
|
||||
### 4. Admin API: Enable/Disable Monitoring
|
||||
|
||||
Add admin command to enable monitoring (in `src/dm_admin.c` or `src/api.c`):
|
||||
|
||||
```c
|
||||
// Handle admin command to enable monitoring
|
||||
if (strcmp(command, "enable_monitoring") == 0) {
|
||||
set_monitoring_enabled(1);
|
||||
send_nip17_response(sender_pubkey,
|
||||
"✅ Kind 34567 monitoring enabled",
|
||||
error_msg, sizeof(error_msg));
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Handle admin command to disable monitoring
|
||||
if (strcmp(command, "disable_monitoring") == 0) {
|
||||
set_monitoring_enabled(0);
|
||||
send_nip17_response(sender_pubkey,
|
||||
"🔴 Kind 34567 monitoring disabled",
|
||||
error_msg, sizeof(error_msg));
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Handle admin command to set throttle interval
|
||||
if (strncmp(command, "set_monitoring_throttle ", 24) == 0) {
|
||||
int seconds = atoi(command + 24);
|
||||
if (seconds >= 1 && seconds <= 3600) {
|
||||
char value[16];
|
||||
snprintf(value, sizeof(value), "%d", seconds);
|
||||
update_config_in_table("kind_34567_reporting_throttling_sec", value);
|
||||
|
||||
char response[128];
|
||||
snprintf(response, sizeof(response),
|
||||
"✅ Monitoring throttle set to %d seconds", seconds);
|
||||
send_nip17_response(sender_pubkey, response, error_msg, sizeof(error_msg));
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Frontend Integration
|
||||
|
||||
### Admin Dashboard Subscription
|
||||
|
||||
```javascript
|
||||
// When admin logs in to dashboard
|
||||
async function enableMonitoring() {
|
||||
// Send admin command to enable monitoring
|
||||
await sendAdminCommand(['enable_monitoring']);
|
||||
|
||||
// Subscribe to kind 34567 events
|
||||
const subscription = {
|
||||
kinds: [34567],
|
||||
authors: [relayPubkey],
|
||||
"#d": ["event_kinds"]
|
||||
};
|
||||
|
||||
relay.subscribe([subscription], {
|
||||
onevent: (event) => {
|
||||
handleMonitoringEvent(event);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Handle incoming monitoring events
|
||||
function handleMonitoringEvent(event) {
|
||||
const content = JSON.parse(event.content);
|
||||
|
||||
if (content.data_type === 'event_kinds') {
|
||||
updateEventKindsChart(content.data);
|
||||
updateTotalEventsDisplay(content.data.total_events);
|
||||
}
|
||||
}
|
||||
|
||||
// When admin logs out or closes dashboard
|
||||
async function disableMonitoring() {
|
||||
await sendAdminCommand(['disable_monitoring']);
|
||||
}
|
||||
```
|
||||
|
||||
### Display Event Kind Distribution
|
||||
|
||||
```javascript
|
||||
function updateEventKindsChart(data) {
|
||||
const { total_events, distribution } = data;
|
||||
|
||||
// Update total events display
|
||||
document.getElementById('total-events').textContent =
|
||||
total_events.toLocaleString();
|
||||
|
||||
// Update chart/table with distribution
|
||||
const tableBody = document.getElementById('kind-distribution-table');
|
||||
tableBody.innerHTML = '';
|
||||
|
||||
distribution.forEach(item => {
|
||||
const row = document.createElement('tr');
|
||||
row.innerHTML = `
|
||||
<td>Kind ${item.kind}</td>
|
||||
<td>${item.count.toLocaleString()}</td>
|
||||
<td>${item.percentage}%</td>
|
||||
`;
|
||||
tableBody.appendChild(row);
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Configuration Migration
|
||||
|
||||
### Add to Schema or Migration Script
|
||||
|
||||
```sql
|
||||
-- Add monitoring configuration
|
||||
INSERT INTO config (key, value, data_type, description, category) VALUES
|
||||
('kind_34567_reporting_enabled', 'false', 'boolean',
|
||||
'Enable/disable kind 34567 event kind distribution reporting', 'monitoring'),
|
||||
('kind_34567_reporting_throttling_sec', '5', 'integer',
|
||||
'Minimum seconds between kind 34567 reports (throttling)', 'monitoring');
|
||||
```
|
||||
|
||||
Or add to existing config initialization in `src/config.c`.
|
||||
|
||||
---
|
||||
|
||||
## Testing
|
||||
|
||||
### 1. Enable Monitoring
|
||||
|
||||
```bash
|
||||
# Via admin command (NIP-17 DM)
# NOTE(review): illustrative only — a real NIP-17 admin command must be a kind 14
# rumor sealed (kind 13) and gift-wrapped (kind 1059) to the relay pubkey. A bare
# `nak event --kind 14` publishes an unwrapped event, which the relay's DM admin
# handler presumably will not process as a command — verify against dm_admin.c.
echo '["enable_monitoring"]' | nak event --kind 14 --content - ws://localhost:8888
|
||||
```
|
||||
|
||||
### 2. Subscribe to Monitoring Events
|
||||
|
||||
```bash
|
||||
# Subscribe to kind 34567 events
|
||||
nak req --kinds 34567 --authors <relay_pubkey> ws://localhost:8888
|
||||
```
|
||||
|
||||
### 3. Generate Events
|
||||
|
||||
```bash
|
||||
# Send some test events to trigger monitoring
|
||||
for i in {1..10}; do
|
||||
nak event -c "Test event $i" ws://localhost:8888
|
||||
sleep 1
|
||||
done
|
||||
```
|
||||
|
||||
### 4. Verify Monitoring Events
|
||||
|
||||
You should see kind 34567 events every 5 seconds (or configured throttle interval) with event kind distribution.
|
||||
|
||||
---
|
||||
|
||||
## Performance Impact
|
||||
|
||||
### With 3 events/second (relay.damus.io scale)
|
||||
|
||||
**Query execution**:
|
||||
- Frequency: at most every 5 seconds (throttled)
- Query time: scales with database size — roughly 18 ms on a small database (matching the `query_time_ms` example above), up to ~700 ms at around 1M events
- Worst-case overhead: 700 ms / 5000 ms = 14% of the querying thread's time (acceptable)
|
||||
|
||||
**Per-event overhead**:
|
||||
- Check if enabled: < 0.01ms
|
||||
- Check throttle: < 0.01ms
|
||||
- Total: < 0.02ms per event (negligible)
|
||||
|
||||
**Overall impact**: < 1% on event processing, 14% on query thread (separate from event processing)
|
||||
|
||||
---
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
Once this is working, easy to add:
|
||||
|
||||
1. **More data types**: Add `d=connections`, `d=subscriptions`, etc.
|
||||
2. **Materialized counters**: Optimize queries for very large databases
|
||||
3. **Historical data**: Store monitoring events for trending
|
||||
4. **Alerts**: Trigger on thresholds (e.g., > 90% capacity)
|
||||
|
||||
---
|
||||
|
||||
## Summary
|
||||
|
||||
This simplified plan provides:
|
||||
|
||||
✅ **Single data type**: Event kind distribution (includes total events)
|
||||
✅ **Two config variables**: Enable/disable and throttle control
|
||||
✅ **On-demand activation**: Enabled when admin logs in
|
||||
✅ **Simple throttling**: Prevents performance impact
|
||||
✅ **Clean implementation**: ~200 lines of code
|
||||
✅ **Easy to extend**: Add more data types later
|
||||
|
||||
**Estimated implementation time**: 4-6 hours
|
||||
|
||||
**Files to create/modify**:
|
||||
- Create: `src/monitoring.h` (~30 lines)
|
||||
- Create: `src/monitoring.c` (~200 lines)
|
||||
- Modify: `src/main.c` (~10 lines)
|
||||
- Modify: `src/config.c` or migration (~5 lines)
|
||||
- Modify: `src/dm_admin.c` or `src/api.c` (~30 lines)
|
||||
- Create: `api/monitoring.js` (frontend, ~100 lines)
|
||||
|
||||
**Total new code**: ~375 lines
|
||||
517
docs/nip59_timestamp_configuration_plan.md
Normal file
@@ -0,0 +1,517 @@
|
||||
# NIP-59 Timestamp Configuration Implementation Plan
|
||||
|
||||
## Overview
|
||||
Add configurable timestamp randomization for NIP-59 gift wraps to improve compatibility with Nostr apps that don't implement timestamp randomization.
|
||||
|
||||
## Problem Statement
|
||||
The NIP-59 protocol specifies that timestamps on gift wraps should have randomness to prevent time-analysis attacks. However, some Nostr platforms don't implement this, causing compatibility issues with direct messaging (NIP-17).
|
||||
|
||||
## Solution
|
||||
Add a configuration parameter `nip59_timestamp_max_delay_sec` that controls the maximum random delay applied to timestamps:
|
||||
- **Value = 0**: Use current timestamp (no randomization) for maximum compatibility
|
||||
- **Value > 0**: Use random timestamp between now and N seconds ago
|
||||
- **Default = 0**: Maximum compatibility mode (no randomization)
|
||||
|
||||
## Implementation Approach: Option B (Direct Parameter Addition)
|
||||
We chose Option B because:
|
||||
1. Explicit and stateless - value flows through call chain
|
||||
2. Thread-safe by design
|
||||
3. No global state needed in nostr_core_lib
|
||||
4. DMs are sent rarely, so database query per call is acceptable
|
||||
|
||||
---
|
||||
|
||||
## Detailed Implementation Steps
|
||||
|
||||
### Phase 1: Configuration Setup in c-relay
|
||||
|
||||
#### 1.1 Add Configuration Parameter
|
||||
**File:** `src/default_config_event.h`
|
||||
**Location:** Line 82 (after `trust_proxy_headers`)
|
||||
|
||||
```c
|
||||
// NIP-59 Gift Wrap Timestamp Configuration
|
||||
{"nip59_timestamp_max_delay_sec", "0"} // Default: 0 (no randomization for compatibility)
|
||||
```
|
||||
|
||||
**Rationale:**
|
||||
- Default of 0 seconds (no randomization) for maximum compatibility
|
||||
- Placed after proxy settings, before closing brace
|
||||
- Follows existing naming convention
|
||||
|
||||
#### 1.2 Add Configuration Validation
|
||||
**File:** `src/config.c`
|
||||
**Function:** `validate_config_field()` (around line 923)
|
||||
|
||||
Add validation case:
|
||||
```c
|
||||
else if (strcmp(key, "nip59_timestamp_max_delay_sec") == 0) {
|
||||
long value = strtol(value_str, NULL, 10);
|
||||
if (value < 0 || value > 604800) { // Max 7 days
|
||||
snprintf(error_msg, error_size,
|
||||
"nip59_timestamp_max_delay_sec must be between 0 and 604800 (7 days)");
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Rationale:**
|
||||
- 0 = no randomization (compatibility mode)
|
||||
- 604800 = 7 days maximum (reasonable upper bound)
|
||||
- Prevents negative values or excessive delays
|
||||
|
||||
---
|
||||
|
||||
### Phase 2: Modify nostr_core_lib Functions
|
||||
|
||||
#### 2.1 Update random_past_timestamp() Function
|
||||
**File:** `nostr_core_lib/nostr_core/nip059.c`
|
||||
**Current Location:** Lines 31-36
|
||||
|
||||
**Current Code:**
|
||||
```c
|
||||
static time_t random_past_timestamp(void) {
|
||||
time_t now = time(NULL);
|
||||
// Random time up to 2 days (172800 seconds) in the past
|
||||
long random_offset = (long)(rand() % 172800);
|
||||
return now - random_offset;
|
||||
}
|
||||
```
|
||||
|
||||
**New Code:**
|
||||
```c
|
||||
static time_t random_past_timestamp(long max_delay_sec) {
|
||||
time_t now = time(NULL);
|
||||
|
||||
// If max_delay_sec is 0, return current timestamp (no randomization)
|
||||
if (max_delay_sec == 0) {
|
||||
return now;
|
||||
}
|
||||
|
||||
// Random time up to max_delay_sec in the past
|
||||
long random_offset = (long)(rand() % max_delay_sec);
|
||||
return now - random_offset;
|
||||
}
|
||||
```
|
||||
|
||||
**Changes:**
|
||||
- Add `long max_delay_sec` parameter
|
||||
- Handle special case: `max_delay_sec == 0` returns current time
- Use `max_delay_sec` instead of hardcoded 172800
- Caution: `rand() % max_delay_sec` has modulo bias, and ISO C only guarantees `RAND_MAX >= 32767`, so on some platforms offsets above 32767 seconds are unreachable even though the config allows up to 604800 — consider `random()` or another PRNG with a wider range if large delays matter
|
||||
|
||||
#### 2.2 Update nostr_nip59_create_seal() Function
|
||||
**File:** `nostr_core_lib/nostr_core/nip059.c`
|
||||
**Current Location:** Lines 144-215
|
||||
|
||||
**Function Signature Change:**
|
||||
```c
|
||||
// OLD:
|
||||
cJSON* nostr_nip59_create_seal(cJSON* rumor,
|
||||
const unsigned char* sender_private_key,
|
||||
const unsigned char* recipient_public_key);
|
||||
|
||||
// NEW:
|
||||
cJSON* nostr_nip59_create_seal(cJSON* rumor,
|
||||
const unsigned char* sender_private_key,
|
||||
const unsigned char* recipient_public_key,
|
||||
long max_delay_sec);
|
||||
```
|
||||
|
||||
**Code Change at Line 181:**
|
||||
```c
|
||||
// OLD:
|
||||
time_t seal_time = random_past_timestamp();
|
||||
|
||||
// NEW:
|
||||
time_t seal_time = random_past_timestamp(max_delay_sec);
|
||||
```
|
||||
|
||||
#### 2.3 Update nostr_nip59_create_gift_wrap() Function
|
||||
**File:** `nostr_core_lib/nostr_core/nip059.c`
|
||||
**Current Location:** Lines 220-323
|
||||
|
||||
**Function Signature Change:**
|
||||
```c
|
||||
// OLD:
|
||||
cJSON* nostr_nip59_create_gift_wrap(cJSON* seal,
|
||||
const char* recipient_public_key_hex);
|
||||
|
||||
// NEW:
|
||||
cJSON* nostr_nip59_create_gift_wrap(cJSON* seal,
|
||||
const char* recipient_public_key_hex,
|
||||
long max_delay_sec);
|
||||
```
|
||||
|
||||
**Code Change at Line 275:**
|
||||
```c
|
||||
// OLD:
|
||||
time_t wrap_time = random_past_timestamp();
|
||||
|
||||
// NEW:
|
||||
time_t wrap_time = random_past_timestamp(max_delay_sec);
|
||||
```
|
||||
|
||||
#### 2.4 Update nip059.h Header
|
||||
**File:** `nostr_core_lib/nostr_core/nip059.h`
|
||||
**Locations:** Lines 38-39 and 48
|
||||
|
||||
**Update Function Declarations:**
|
||||
```c
|
||||
// Line 38-39: Update nostr_nip59_create_seal
|
||||
cJSON* nostr_nip59_create_seal(cJSON* rumor,
|
||||
const unsigned char* sender_private_key,
|
||||
const unsigned char* recipient_public_key,
|
||||
long max_delay_sec);
|
||||
|
||||
// Line 48: Update nostr_nip59_create_gift_wrap
|
||||
cJSON* nostr_nip59_create_gift_wrap(cJSON* seal,
|
||||
const char* recipient_public_key_hex,
|
||||
long max_delay_sec);
|
||||
```
|
||||
|
||||
**Update Documentation Comments:**
|
||||
```c
|
||||
/**
|
||||
* NIP-59: Create a seal (kind 13) wrapping a rumor
|
||||
*
|
||||
* @param rumor The rumor event to seal (cJSON object)
|
||||
* @param sender_private_key 32-byte sender private key
|
||||
* @param recipient_public_key 32-byte recipient public key (x-only)
|
||||
* @param max_delay_sec Maximum random delay in seconds (0 = no randomization)
|
||||
* @return cJSON object representing the seal event, or NULL on error
|
||||
*/
|
||||
|
||||
/**
|
||||
* NIP-59: Create a gift wrap (kind 1059) wrapping a seal
|
||||
*
|
||||
* @param seal The seal event to wrap (cJSON object)
|
||||
* @param recipient_public_key_hex Recipient's public key in hex format
|
||||
* @param max_delay_sec Maximum random delay in seconds (0 = no randomization)
|
||||
* @return cJSON object representing the gift wrap event, or NULL on error
|
||||
*/
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Phase 3: Update NIP-17 Integration
|
||||
|
||||
#### 3.1 Update nostr_nip17_send_dm() Function
|
||||
**File:** `nostr_core_lib/nostr_core/nip017.c`
|
||||
**Current Location:** Lines 260-320
|
||||
|
||||
**Function Signature Change:**
|
||||
```c
|
||||
// OLD:
|
||||
int nostr_nip17_send_dm(cJSON* dm_event,
|
||||
const char** recipient_pubkeys,
|
||||
int num_recipients,
|
||||
const unsigned char* sender_private_key,
|
||||
cJSON** gift_wraps_out,
|
||||
int max_gift_wraps);
|
||||
|
||||
// NEW:
|
||||
int nostr_nip17_send_dm(cJSON* dm_event,
|
||||
const char** recipient_pubkeys,
|
||||
int num_recipients,
|
||||
const unsigned char* sender_private_key,
|
||||
cJSON** gift_wraps_out,
|
||||
int max_gift_wraps,
|
||||
long max_delay_sec);
|
||||
```
|
||||
|
||||
**Code Changes:**
|
||||
|
||||
At line 281 (seal creation):
|
||||
```c
|
||||
// OLD:
|
||||
cJSON* seal = nostr_nip59_create_seal(dm_event, sender_private_key, recipient_public_key);
|
||||
|
||||
// NEW:
|
||||
cJSON* seal = nostr_nip59_create_seal(dm_event, sender_private_key, recipient_public_key, max_delay_sec);
|
||||
```
|
||||
|
||||
At line 287 (gift wrap creation):
|
||||
```c
|
||||
// OLD:
|
||||
cJSON* gift_wrap = nostr_nip59_create_gift_wrap(seal, recipient_pubkeys[i]);
|
||||
|
||||
// NEW:
|
||||
cJSON* gift_wrap = nostr_nip59_create_gift_wrap(seal, recipient_pubkeys[i], max_delay_sec);
|
||||
```
|
||||
|
||||
At line 306 (sender seal creation):
|
||||
```c
|
||||
// OLD:
|
||||
cJSON* sender_seal = nostr_nip59_create_seal(dm_event, sender_private_key, sender_public_key);
|
||||
|
||||
// NEW:
|
||||
cJSON* sender_seal = nostr_nip59_create_seal(dm_event, sender_private_key, sender_public_key, max_delay_sec);
|
||||
```
|
||||
|
||||
At line 309 (sender gift wrap creation):
|
||||
```c
|
||||
// OLD:
|
||||
cJSON* sender_gift_wrap = nostr_nip59_create_gift_wrap(sender_seal, sender_pubkey_hex);
|
||||
|
||||
// NEW:
|
||||
cJSON* sender_gift_wrap = nostr_nip59_create_gift_wrap(sender_seal, sender_pubkey_hex, max_delay_sec);
|
||||
```
|
||||
|
||||
#### 3.2 Update nip017.h Header
|
||||
**File:** `nostr_core_lib/nostr_core/nip017.h`
|
||||
**Location:** Lines 102-107
|
||||
|
||||
**Update Function Declaration:**
|
||||
```c
|
||||
int nostr_nip17_send_dm(cJSON* dm_event,
|
||||
const char** recipient_pubkeys,
|
||||
int num_recipients,
|
||||
const unsigned char* sender_private_key,
|
||||
cJSON** gift_wraps_out,
|
||||
int max_gift_wraps,
|
||||
long max_delay_sec);
|
||||
```
|
||||
|
||||
**Update Documentation Comment (lines 88-100):**
|
||||
```c
|
||||
/**
|
||||
* NIP-17: Send a direct message to recipients
|
||||
*
|
||||
* This function creates the appropriate rumor, seals it, gift wraps it,
|
||||
* and returns the final gift wrap events ready for publishing.
|
||||
*
|
||||
* @param dm_event The unsigned DM event (kind 14 or 15)
|
||||
* @param recipient_pubkeys Array of recipient public keys (hex strings)
|
||||
* @param num_recipients Number of recipients
|
||||
* @param sender_private_key 32-byte sender private key
|
||||
* @param gift_wraps_out Array to store resulting gift wrap events (caller must free)
|
||||
* @param max_gift_wraps Maximum number of gift wraps to create
|
||||
* @param max_delay_sec Maximum random timestamp delay in seconds (0 = no randomization)
|
||||
* @return Number of gift wrap events created, or -1 on error
|
||||
*/
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Phase 4: Update c-relay Call Sites
|
||||
|
||||
#### 4.1 Update src/api.c
|
||||
**Location:** Line 1319
|
||||
|
||||
**Current Code:**
|
||||
```c
|
||||
int send_result = nostr_nip17_send_dm(
|
||||
dm_response, // dm_event
|
||||
recipient_pubkeys, // recipient_pubkeys
|
||||
1, // num_recipients
|
||||
relay_privkey, // sender_private_key
|
||||
gift_wraps, // gift_wraps_out
|
||||
1 // max_gift_wraps
|
||||
);
|
||||
```
|
||||
|
||||
**New Code:**
|
||||
```c
|
||||
// Get timestamp delay configuration
|
||||
long max_delay_sec = get_config_int("nip59_timestamp_max_delay_sec", 0);
|
||||
|
||||
int send_result = nostr_nip17_send_dm(
|
||||
dm_response, // dm_event
|
||||
recipient_pubkeys, // recipient_pubkeys
|
||||
1, // num_recipients
|
||||
relay_privkey, // sender_private_key
|
||||
gift_wraps, // gift_wraps_out
|
||||
1, // max_gift_wraps
|
||||
max_delay_sec // max_delay_sec
|
||||
);
|
||||
```
|
||||
|
||||
#### 4.2 Update src/dm_admin.c
|
||||
**Location:** Line 371
|
||||
|
||||
**Current Code:**
|
||||
```c
|
||||
int send_result = nostr_nip17_send_dm(
|
||||
success_dm, // dm_event
|
||||
sender_pubkey_array, // recipient_pubkeys
|
||||
1, // num_recipients
|
||||
relay_privkey, // sender_private_key
|
||||
success_gift_wraps, // gift_wraps_out
|
||||
1 // max_gift_wraps
|
||||
);
|
||||
```
|
||||
|
||||
**New Code:**
|
||||
```c
|
||||
// Get timestamp delay configuration
|
||||
long max_delay_sec = get_config_int("nip59_timestamp_max_delay_sec", 0);
|
||||
|
||||
int send_result = nostr_nip17_send_dm(
|
||||
success_dm, // dm_event
|
||||
sender_pubkey_array, // recipient_pubkeys
|
||||
1, // num_recipients
|
||||
relay_privkey, // sender_private_key
|
||||
success_gift_wraps, // gift_wraps_out
|
||||
1, // max_gift_wraps
|
||||
max_delay_sec // max_delay_sec
|
||||
);
|
||||
```
|
||||
|
||||
**Note:** Both files already include `config.h`, so `get_config_int()` is available.
|
||||
|
||||
---
|
||||
|
||||
## Testing Plan
|
||||
|
||||
### Test Case 1: No Randomization (Compatibility Mode)
|
||||
**Configuration:** `nip59_timestamp_max_delay_sec = 0`
|
||||
|
||||
**Expected Behavior:**
|
||||
- Gift wrap timestamps should equal current time
|
||||
- Seal timestamps should equal current time
|
||||
- No random delay applied
|
||||
|
||||
**Test Command:**
|
||||
```bash
|
||||
# Set config via admin API
|
||||
# Send test DM
|
||||
# Verify timestamps are current (within 1 second of send time)
|
||||
```
|
||||
|
||||
### Test Case 2: Custom Delay
|
||||
**Configuration:** `nip59_timestamp_max_delay_sec = 1000`
|
||||
|
||||
**Expected Behavior:**
|
||||
- Gift wrap timestamps should be between now and 1000 seconds ago
|
||||
- Seal timestamps should be between now and 1000 seconds ago
|
||||
- Random delay applied within specified range
|
||||
|
||||
**Test Command:**
|
||||
```bash
|
||||
# Set config via admin API
|
||||
# Send test DM
|
||||
# Verify timestamps are in past but within 1000 seconds
|
||||
```
|
||||
|
||||
### Test Case 3: Default Behavior
|
||||
**Configuration:** `nip59_timestamp_max_delay_sec = 0` (default)
|
||||
|
||||
**Expected Behavior:**
|
||||
- Gift wrap timestamps should equal current time
|
||||
- Seal timestamps should equal current time
|
||||
- No randomization (maximum compatibility)
|
||||
|
||||
**Test Command:**
|
||||
```bash
|
||||
# Use default config
|
||||
# Send test DM
|
||||
# Verify timestamps are current (within 1 second of send time)
|
||||
```
|
||||
|
||||
### Test Case 4: Configuration Validation
|
||||
**Test Invalid Values:**
|
||||
- Negative value: Should be rejected
|
||||
- Value > 604800: Should be rejected
|
||||
- Valid boundary values (0, 604800): Should be accepted
|
||||
|
||||
### Test Case 5: Interoperability
|
||||
**Test with Other Nostr Clients:**
|
||||
- Send DM with `max_delay_sec = 0` to clients that don't randomize
|
||||
- Send DM with `max_delay_sec = 172800` to clients that do randomize
|
||||
- Verify both scenarios work correctly
|
||||
|
||||
---
|
||||
|
||||
## Documentation Updates
|
||||
|
||||
### Update docs/configuration_guide.md
|
||||
|
||||
Add new section:
|
||||
|
||||
```markdown
|
||||
### NIP-59 Gift Wrap Timestamp Configuration
|
||||
|
||||
#### nip59_timestamp_max_delay_sec
|
||||
- **Type:** Integer
|
||||
- **Default:** 0 (no randomization)
|
||||
- **Range:** 0 to 604800 (7 days)
|
||||
- **Description:** Controls timestamp randomization for NIP-59 gift wraps
|
||||
|
||||
The NIP-59 protocol recommends randomizing timestamps on gift wraps to prevent
|
||||
time-analysis attacks. However, some Nostr platforms don't implement this,
|
||||
causing compatibility issues.
|
||||
|
||||
**Values:**
|
||||
- `0` (default): No randomization - uses current timestamp (maximum compatibility)
|
||||
- `1-604800`: Random timestamp between now and N seconds ago
|
||||
|
||||
**Use Cases:**
|
||||
- Keep default `0` for maximum compatibility with clients that don't randomize
|
||||
- Set to `172800` for privacy per NIP-59 specification (2 days randomization)
|
||||
- Set to custom value (e.g., `3600`) for 1-hour randomization window
|
||||
|
||||
**Example:**
|
||||
```json
|
||||
["nip59_timestamp_max_delay_sec", "0"] // Default: compatibility mode
|
||||
["nip59_timestamp_max_delay_sec", "3600"] // 1 hour randomization
|
||||
["nip59_timestamp_max_delay_sec", "172800"] // 2 days randomization
|
||||
```
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Implementation Checklist
|
||||
|
||||
### nostr_core_lib Changes
|
||||
- [ ] Modify `random_past_timestamp()` to accept `max_delay_sec` parameter
|
||||
- [ ] Update `nostr_nip59_create_seal()` signature and implementation
|
||||
- [ ] Update `nostr_nip59_create_gift_wrap()` signature and implementation
|
||||
- [ ] Update `nip059.h` function declarations and documentation
|
||||
- [ ] Update `nostr_nip17_send_dm()` signature and implementation
|
||||
- [ ] Update `nip017.h` function declaration and documentation
|
||||
|
||||
### c-relay Changes
|
||||
- [ ] Add `nip59_timestamp_max_delay_sec` to `default_config_event.h`
|
||||
- [ ] Add validation in `config.c` for new parameter
|
||||
- [ ] Update `src/api.c` call site to pass `max_delay_sec`
|
||||
- [ ] Update `src/dm_admin.c` call site to pass `max_delay_sec`
|
||||
|
||||
### Testing
|
||||
- [ ] Test with `max_delay_sec = 0` (no randomization)
|
||||
- [ ] Test with `max_delay_sec = 1000` (custom delay)
|
||||
- [ ] Test with `max_delay_sec = 172800` (NIP-59 recommended 2-day randomization; note the default is 0)
|
||||
- [ ] Test configuration validation (invalid values)
|
||||
- [ ] Test interoperability with other Nostr clients
|
||||
|
||||
### Documentation
|
||||
- [ ] Update `docs/configuration_guide.md`
|
||||
- [ ] Add this implementation plan to docs
|
||||
- [ ] Update README if needed
|
||||
|
||||
---
|
||||
|
||||
## Rollback Plan
|
||||
|
||||
If issues arise:
|
||||
1. Revert nostr_core_lib changes (git revert in submodule)
|
||||
2. Revert c-relay changes
|
||||
3. Configuration parameter will be ignored if not used
|
||||
4. Default behavior (0) provides maximum compatibility
|
||||
|
||||
---
|
||||
|
||||
## Notes
|
||||
|
||||
- The configuration is read on each DM send, allowing runtime changes
|
||||
- No restart required when changing `nip59_timestamp_max_delay_sec`
|
||||
- Thread-safe by design (no global state)
|
||||
- Default value of 0 provides maximum compatibility with other Nostr clients
|
||||
- Can be changed to 172800 or other values for NIP-59 privacy features
|
||||
|
||||
---
|
||||
|
||||
## References
|
||||
|
||||
- [NIP-59: Gift Wrap](https://github.com/nostr-protocol/nips/blob/master/59.md)
|
||||
- [NIP-17: Private Direct Messages](https://github.com/nostr-protocol/nips/blob/master/17.md)
|
||||
- [NIP-44: Versioned Encryption](https://github.com/nostr-protocol/nips/blob/master/44.md)
|
||||
1189
docs/realtime_monitoring_design.md
Normal file
325
docs/relay_traffic_measurement.md
Normal file
@@ -0,0 +1,325 @@
|
||||
# Relay Traffic Measurement Guide
|
||||
|
||||
## Measuring Real-World Relay Traffic
|
||||
|
||||
To validate our performance assumptions, here are commands to measure actual event rates from live relays.
|
||||
|
||||
---
|
||||
|
||||
## Command: Count Events Over 1 Minute
|
||||
|
||||
### Basic Command
|
||||
|
||||
```bash
|
||||
# Count events from relay.damus.io over 60 seconds
|
||||
timeout 60 nak req -s $(date +%s) --stream wss://relay.damus.io | wc -l
|
||||
```
|
||||
|
||||
This will:
|
||||
1. Subscribe to all new events (`-s $(date +%s)` = since now)
|
||||
2. Stream for 60 seconds (`timeout 60`)
|
||||
3. Count the lines (each line = 1 event)
|
||||
|
||||
### With Event Rate Display
|
||||
|
||||
```bash
|
||||
# Show events per second in real-time
|
||||
timeout 60 nak req -s $(date +%s) --stream wss://relay.damus.io | \
|
||||
pv -l -i 1 -r > /dev/null
|
||||
```
|
||||
|
||||
This displays:
|
||||
- Total events received
|
||||
- Current rate (events/second)
|
||||
- Average rate
|
||||
|
||||
### With Detailed Statistics
|
||||
|
||||
```bash
|
||||
# Count events and calculate statistics
|
||||
echo "Measuring relay traffic for 60 seconds..."
|
||||
START=$(date +%s)
|
||||
COUNT=$(timeout 60 nak req -s $START --stream wss://relay.damus.io | wc -l)
|
||||
END=$(date +%s)
|
||||
DURATION=$((END - START))
|
||||
|
||||
echo "Results:"
|
||||
echo " Total events: $COUNT"
|
||||
echo " Duration: ${DURATION}s"
|
||||
echo " Events/second: $(echo "scale=2; $COUNT / $DURATION" | bc)"
|
||||
echo " Events/minute: $COUNT"
|
||||
```
|
||||
|
||||
### With Event Kind Distribution
|
||||
|
||||
```bash
|
||||
# Count events by kind over 60 seconds
|
||||
timeout 60 nak req -s $(date +%s) --stream wss://relay.damus.io | \
|
||||
jq -r '.kind' | \
|
||||
sort | uniq -c | sort -rn
|
||||
```
|
||||
|
||||
Output example:
|
||||
```
|
||||
45 1 # 45 text notes
|
||||
12 3 # 12 contact lists
|
||||
8 7 # 8 reactions
|
||||
3 6 # 3 reposts
|
||||
```
|
||||
|
||||
### With Timestamp Analysis
|
||||
|
||||
```bash
|
||||
# Show event timestamps and calculate intervals
|
||||
timeout 60 nak req -s $(date +%s) --stream wss://relay.damus.io | \
|
||||
jq -r '.created_at' | \
|
||||
awk 'NR>1 {print $1-prev} {prev=$1}' | \
|
||||
awk '{sum+=$1; count++} END {
|
||||
print "Average interval:", sum/count, "seconds"
|
||||
print "Events per second:", count/sum
|
||||
}'
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Testing Multiple Relays
|
||||
|
||||
### Compare Traffic Across Relays
|
||||
|
||||
```bash
|
||||
#!/bin/bash
|
||||
# test_relay_traffic.sh
|
||||
|
||||
RELAYS=(
|
||||
"wss://relay.damus.io"
|
||||
"wss://nos.lol"
|
||||
"wss://relay.nostr.band"
|
||||
"wss://nostr.wine"
|
||||
)
|
||||
|
||||
DURATION=60
|
||||
|
||||
echo "Measuring relay traffic for ${DURATION} seconds..."
|
||||
echo ""
|
||||
|
||||
for relay in "${RELAYS[@]}"; do
|
||||
echo "Testing: $relay"
|
||||
count=$(timeout $DURATION nak req -s $(date +%s) --stream "$relay" 2>/dev/null | wc -l)
|
||||
rate=$(echo "scale=2; $count / $DURATION" | bc)
|
||||
echo " Events: $count"
|
||||
echo " Rate: ${rate}/sec"
|
||||
echo ""
|
||||
done
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Expected Results (Based on Real Measurements)
|
||||
|
||||
### relay.damus.io (Large Public Relay)
|
||||
- **Expected rate**: 0.5-2 events/second
|
||||
- **60-second count**: 30-120 events
|
||||
- **Peak times**: Higher during US daytime hours
|
||||
|
||||
### nos.lol (Medium Public Relay)
|
||||
- **Expected rate**: 0.2-0.8 events/second
|
||||
- **60-second count**: 12-48 events
|
||||
|
||||
### Personal/Small Relays
|
||||
- **Expected rate**: 0.01-0.1 events/second
|
||||
- **60-second count**: 1-6 events
|
||||
|
||||
---
|
||||
|
||||
## Using Results to Validate Performance Assumptions
|
||||
|
||||
After measuring your relay's traffic:
|
||||
|
||||
1. **Calculate average events/second**:
|
||||
```
|
||||
events_per_second = total_events / 60
|
||||
```
|
||||
|
||||
2. **Estimate query overhead**:
|
||||
```
|
||||
# For 100k event database:
|
||||
query_time = 70ms
|
||||
overhead_percentage = (query_time * events_per_second) / 1000 * 100
|
||||
|
||||
# Example: 0.5 events/sec
|
||||
overhead = (70 * 0.5) / 1000 * 100 = 3.5%
|
||||
```
|
||||
|
||||
3. **Determine if optimization needed**:
|
||||
- < 5% overhead: No optimization needed
|
||||
- 5-20% overhead: Consider 1-second throttling
|
||||
- > 20% overhead: Use materialized counters
|
||||
|
||||
---
|
||||
|
||||
## Real-Time Monitoring During Development
|
||||
|
||||
### Monitor Your Own Relay
|
||||
|
||||
```bash
|
||||
# Watch events in real-time with count
|
||||
nak req -s $(date +%s) --stream ws://localhost:8888 | \
|
||||
awk '{count++; print count, $0}'
|
||||
```
|
||||
|
||||
### Monitor with Event Details
|
||||
|
||||
```bash
|
||||
# Show event kind and pubkey for each event
|
||||
nak req -s $(date +%s) --stream ws://localhost:8888 | \
|
||||
jq -r '"[\(.kind)] \(.pubkey[0:8])... \(.content[0:50])"'
|
||||
```
|
||||
|
||||
### Continuous Traffic Monitoring
|
||||
|
||||
```bash
|
||||
# Monitor traffic in 10-second windows
|
||||
while true; do
|
||||
echo "=== $(date) ==="
|
||||
count=$(timeout 10 nak req -s $(date +%s) --stream ws://localhost:8888 | wc -l)
|
||||
rate=$(echo "scale=2; $count / 10" | bc)
|
||||
echo "Events: $count (${rate}/sec)"
|
||||
sleep 1
|
||||
done
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Performance Testing Commands
|
||||
|
||||
### Simulate Load
|
||||
|
||||
```bash
|
||||
# Send test events to measure query performance
|
||||
for i in {1..100}; do
|
||||
nak event -c "Test event $i" ws://localhost:8888
|
||||
sleep 0.1 # 10 events/second
|
||||
done
|
||||
```
|
||||
|
||||
### Measure Query Response Time
|
||||
|
||||
```bash
|
||||
# Time how long queries take with current database
|
||||
time sqlite3 your_relay.db "SELECT COUNT(*) FROM events"
|
||||
time sqlite3 your_relay.db "SELECT kind, COUNT(*) FROM events GROUP BY kind"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Automated Traffic Analysis Script
|
||||
|
||||
Save this as `analyze_relay_traffic.sh`:
|
||||
|
||||
```bash
|
||||
#!/bin/bash
|
||||
# Comprehensive relay traffic analysis
|
||||
|
||||
RELAY="${1:-ws://localhost:8888}"
|
||||
DURATION="${2:-60}"
|
||||
|
||||
echo "Analyzing relay: $RELAY"
|
||||
echo "Duration: ${DURATION} seconds"
|
||||
echo ""
|
||||
|
||||
# Collect events
|
||||
TMPFILE=$(mktemp)
|
||||
timeout $DURATION nak req -s $(date +%s) --stream "$RELAY" > "$TMPFILE" 2>/dev/null
|
||||
|
||||
# Calculate statistics
|
||||
TOTAL=$(wc -l < "$TMPFILE")
|
||||
RATE=$(echo "scale=2; $TOTAL / $DURATION" | bc)
|
||||
|
||||
echo "=== Traffic Statistics ==="
|
||||
echo "Total events: $TOTAL"
|
||||
echo "Events/second: $RATE"
|
||||
echo "Events/minute: $(echo "$TOTAL * 60 / $DURATION" | bc)"
|
||||
echo ""
|
||||
|
||||
echo "=== Event Kind Distribution ==="
|
||||
jq -r '.kind' "$TMPFILE" | sort | uniq -c | sort -rn | head -10
|
||||
echo ""
|
||||
|
||||
echo "=== Top Publishers ==="
|
||||
jq -r '.pubkey[0:16]' "$TMPFILE" | sort | uniq -c | sort -rn | head -5
|
||||
echo ""
|
||||
|
||||
echo "=== Performance Estimate ==="
|
||||
echo "For 100k event database:"
|
||||
echo " Query time: ~70ms"
|
||||
echo " Overhead: $(echo "scale=2; 70 * $RATE / 10" | bc)%"
|
||||
echo ""
|
||||
|
||||
# Cleanup
|
||||
rm "$TMPFILE"
|
||||
```
|
||||
|
||||
Usage:
|
||||
```bash
|
||||
chmod +x analyze_relay_traffic.sh
|
||||
./analyze_relay_traffic.sh wss://relay.damus.io 60
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Interpreting Results
|
||||
|
||||
### Low Traffic (< 0.1 events/sec)
|
||||
- **Typical for**: Personal relays, small communities
|
||||
- **Recommendation**: Trigger on every event, no optimization
|
||||
- **Expected overhead**: < 1%
|
||||
|
||||
### Medium Traffic (0.1-0.5 events/sec)
|
||||
- **Typical for**: Medium public relays
|
||||
- **Recommendation**: Trigger on every event, consider throttling if database > 100k
|
||||
- **Expected overhead**: 1-5%
|
||||
|
||||
### High Traffic (0.5-2 events/sec)
|
||||
- **Typical for**: Large public relays
|
||||
- **Recommendation**: Use 1-second throttling
|
||||
- **Expected overhead**: 5-20% without throttling, < 1% with throttling
|
||||
|
||||
### Very High Traffic (> 2 events/sec)
|
||||
- **Typical for**: Major public relays (rare)
|
||||
- **Recommendation**: Use materialized counters
|
||||
- **Expected overhead**: > 20% without optimization
|
||||
|
||||
---
|
||||
|
||||
## Continuous Monitoring in Production
|
||||
|
||||
### Add to Relay Startup
|
||||
|
||||
```bash
|
||||
# In your relay startup script
|
||||
echo "Starting traffic monitoring..."
|
||||
nohup bash -c 'while true; do
|
||||
count=$(timeout 60 nak req -s $(date +%s) --stream ws://localhost:8888 2>/dev/null | wc -l)
|
||||
echo "$(date +%Y-%m-%d\ %H:%M:%S) - Events/min: $count" >> traffic.log
|
||||
done' &
|
||||
```
|
||||
|
||||
### Analyze Historical Traffic
|
||||
|
||||
```bash
|
||||
# View traffic trends
|
||||
awk '{print $5}' traffic.log | \
|
||||
awk '{sum+=$1; count++} END {print "Average:", sum/count, "events/min"}'
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Conclusion
|
||||
|
||||
Use these commands to:
|
||||
1. ✅ Measure real-world traffic on your relay
|
||||
2. ✅ Validate performance assumptions
|
||||
3. ✅ Determine if optimization is needed
|
||||
4. ✅ Monitor traffic trends over time
|
||||
|
||||
**Remember**: Most relays will measure < 1 event/second, making the simple "trigger on every event" approach perfectly viable.
|
||||
630
docs/sql_query_admin_api.md
Normal file
@@ -0,0 +1,630 @@
|
||||
# SQL Query Admin API Design
|
||||
|
||||
## Overview
|
||||
|
||||
This document describes the design for a general-purpose SQL query interface for the C-Relay admin API. This allows administrators to execute read-only SQL queries against the relay database through cryptographically signed kind 23456 events with NIP-44 encrypted command arrays.
|
||||
|
||||
## Security Model
|
||||
|
||||
### Authentication
|
||||
- All queries must be sent as kind 23456 events with NIP-44 encrypted content
|
||||
- Events must be signed by the admin's private key
|
||||
- Admin pubkey verified against `config.admin_pubkey`
|
||||
- Follows the same authentication pattern as existing admin commands
|
||||
|
||||
### Query Restrictions
|
||||
While authentication is cryptographically secure, we implement defensive safeguards:
|
||||
|
||||
1. **Read-Only Enforcement**
|
||||
- Only SELECT statements allowed
|
||||
- Block: INSERT, UPDATE, DELETE, DROP, CREATE, ALTER, PRAGMA (write operations)
|
||||
- Allow: SELECT, WITH (for CTEs)
|
||||
|
||||
2. **Resource Limits**
|
||||
- Query timeout: 5 seconds (configurable)
|
||||
- Result row limit: 1000 rows (configurable)
|
||||
- Result size limit: 1MB (configurable)
|
||||
|
||||
3. **Query Logging**
|
||||
- All queries logged with timestamp, admin pubkey, execution time
|
||||
- Failed queries logged with error message
|
||||
|
||||
## Command Format
|
||||
|
||||
### Admin Event Structure (Kind 23456)
|
||||
```json
|
||||
{
|
||||
"id": "event_id",
|
||||
"pubkey": "admin_public_key",
|
||||
"created_at": 1234567890,
|
||||
"kind": 23456,
|
||||
"content": "AqHBUgcM7dXFYLQuDVzGwMST1G8jtWYyVvYxXhVGEu4nAb4LVw...",
|
||||
"tags": [
|
||||
["p", "relay_public_key"]
|
||||
],
|
||||
"sig": "event_signature"
|
||||
}
|
||||
```
|
||||
|
||||
The `content` field contains a NIP-44 encrypted JSON array:
|
||||
```json
|
||||
["sql_query", "SELECT * FROM events LIMIT 10"]
|
||||
```
|
||||
|
||||
### Response Format (Kind 23457)
|
||||
```json
|
||||
["EVENT", "temp_sub_id", {
|
||||
"id": "response_event_id",
|
||||
"pubkey": "relay_public_key",
|
||||
"created_at": 1234567890,
|
||||
"kind": 23457,
|
||||
"content": "nip44_encrypted_content",
|
||||
"tags": [
|
||||
["p", "admin_public_key"],
|
||||
["e", "request_event_id"]
|
||||
],
|
||||
"sig": "response_event_signature"
|
||||
}]
|
||||
```
|
||||
|
||||
The `content` field contains NIP-44 encrypted JSON:
|
||||
```json
|
||||
{
|
||||
"query_type": "sql_query",
|
||||
"request_id": "request_event_id",
|
||||
"timestamp": 1234567890,
|
||||
"query": "SELECT * FROM events LIMIT 10",
|
||||
"execution_time_ms": 45,
|
||||
"row_count": 10,
|
||||
"columns": ["id", "pubkey", "created_at", "kind", "content"],
|
||||
"rows": [
|
||||
["abc123...", "def456...", 1234567890, 1, "Hello world"],
|
||||
...
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**Note:** The response includes the request event ID in two places:
|
||||
1. **In tags**: `["e", "request_event_id"]` - Standard Nostr convention for event references
|
||||
2. **In content**: `"request_id": "request_event_id"` - For easy access after decryption
|
||||
|
||||
### Error Response Format (Kind 23457)
|
||||
```json
|
||||
["EVENT", "temp_sub_id", {
|
||||
"id": "response_event_id",
|
||||
"pubkey": "relay_public_key",
|
||||
"created_at": 1234567890,
|
||||
"kind": 23457,
|
||||
"content": "nip44_encrypted_content",
|
||||
"tags": [
|
||||
["p", "admin_public_key"],
|
||||
["e", "request_event_id"]
|
||||
],
|
||||
"sig": "response_event_signature"
|
||||
}]
|
||||
```
|
||||
|
||||
The `content` field contains NIP-44 encrypted JSON:
|
||||
```json
|
||||
{
|
||||
"query_type": "sql_query",
|
||||
"request_id": "request_event_id",
|
||||
"timestamp": 1234567890,
|
||||
"query": "DELETE FROM events",
|
||||
"status": "error",
|
||||
"error": "Query blocked: DELETE statements not allowed",
|
||||
"error_type": "blocked_statement"
|
||||
}
|
||||
```
|
||||
|
||||
## Available Database Tables and Views
|
||||
|
||||
### Core Tables
|
||||
- **events** - All Nostr events (id, pubkey, created_at, kind, content, tags, sig)
|
||||
- **config** - Configuration key-value pairs
|
||||
- **auth_rules** - Authentication and authorization rules
|
||||
- **subscription_events** - Subscription lifecycle events
|
||||
- **event_broadcasts** - Event broadcast log
|
||||
|
||||
### Useful Views
|
||||
- **recent_events** - Last 1000 events
|
||||
- **event_stats** - Event statistics by type
|
||||
- **configuration_events** - Kind 33334 configuration events
|
||||
- **subscription_analytics** - Subscription metrics by date
|
||||
- **active_subscriptions_log** - Currently active subscriptions
|
||||
- **event_kinds_view** - Event distribution by kind
|
||||
- **top_pubkeys_view** - Top 10 pubkeys by event count
|
||||
- **time_stats_view** - Time-based statistics (24h, 7d, 30d)
|
||||
|
||||
## Implementation Plan
|
||||
|
||||
### Backend (dm_admin.c)
|
||||
|
||||
#### 1. Query Validation Function
|
||||
```c
|
||||
int validate_sql_query(const char* query, char* error_msg, size_t error_size);
|
||||
```
|
||||
- Check for blocked keywords (case-insensitive)
|
||||
- Validate query syntax (basic checks)
|
||||
- Return 0 on success, -1 on failure
|
||||
|
||||
#### 2. Query Execution Function
|
||||
```c
|
||||
char* execute_sql_query(const char* query, const char* request_id, char* error_msg, size_t error_size);
|
||||
```
|
||||
- Set query timeout using sqlite3_busy_timeout()
|
||||
- Execute query with row/size limits
|
||||
- Build JSON response with results
|
||||
- Log query execution
|
||||
- Return JSON string or NULL on error
|
||||
|
||||
#### 3. Command Handler Integration
|
||||
Add to `process_dm_admin_command()` in [`dm_admin.c`](src/dm_admin.c:131):
|
||||
```c
|
||||
else if (strcmp(command_type, "sql_query") == 0) {
|
||||
const char* query = get_tag_value(event, "sql_query", 1);
|
||||
if (!query) {
|
||||
DEBUG_ERROR("DM Admin: Missing sql_query parameter");
|
||||
snprintf(error_message, error_size, "invalid: missing SQL query");
|
||||
} else {
|
||||
result = handle_sql_query_unified(event, query, error_message, error_size, wsi);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Add unified handler function:
|
||||
```c
|
||||
int handle_sql_query_unified(cJSON* event, const char* query,
|
||||
char* error_message, size_t error_size,
|
||||
struct lws* wsi) {
|
||||
// Get request event ID for response correlation
|
||||
cJSON* request_id_obj = cJSON_GetObjectItem(event, "id");
|
||||
if (!request_id_obj || !cJSON_IsString(request_id_obj)) {
|
||||
snprintf(error_message, error_size, "Missing request event ID");
|
||||
return -1;
|
||||
}
|
||||
const char* request_id = cJSON_GetStringValue(request_id_obj);
|
||||
|
||||
// Validate query
|
||||
    if (validate_sql_query(query, error_message, error_size) != 0) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
// Execute query and include request_id in result
|
||||
char* result_json = execute_sql_query(query, request_id, error_message, error_size);
|
||||
if (!result_json) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
// Send response as kind 23457 event with request ID in tags
|
||||
cJSON* sender_pubkey_obj = cJSON_GetObjectItem(event, "pubkey");
|
||||
if (!sender_pubkey_obj || !cJSON_IsString(sender_pubkey_obj)) {
|
||||
free(result_json);
|
||||
snprintf(error_message, error_size, "Missing sender pubkey");
|
||||
return -1;
|
||||
}
|
||||
|
||||
const char* sender_pubkey = cJSON_GetStringValue(sender_pubkey_obj);
|
||||
int send_result = send_admin_response(sender_pubkey, result_json, request_id,
|
||||
error_message, error_size, wsi);
|
||||
free(result_json);
|
||||
|
||||
return send_result;
|
||||
}
|
||||
```
|
||||
|
||||
### Frontend (api/index.html)
|
||||
|
||||
#### SQL Query Section UI
|
||||
Add to [`api/index.html`](api/index.html:1):
|
||||
```html
|
||||
<section id="sql-query-section" class="admin-section">
|
||||
<h2>SQL Query Console</h2>
|
||||
|
||||
<div class="query-selector">
|
||||
<label for="query-dropdown">Quick Queries & History:</label>
|
||||
<select id="query-dropdown" onchange="loadSelectedQuery()">
|
||||
<option value="">-- Select a query --</option>
|
||||
<optgroup label="Common Queries">
|
||||
<option value="recent_events">Recent Events</option>
|
||||
<option value="event_stats">Event Statistics</option>
|
||||
<option value="subscriptions">Active Subscriptions</option>
|
||||
<option value="top_pubkeys">Top Pubkeys</option>
|
||||
<option value="event_kinds">Event Kinds Distribution</option>
|
||||
<option value="time_stats">Time-based Statistics</option>
|
||||
</optgroup>
|
||||
<optgroup label="Query History" id="history-group">
|
||||
<!-- Dynamically populated from localStorage -->
|
||||
</optgroup>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
<div class="query-editor">
|
||||
<label for="sql-input">SQL Query:</label>
|
||||
<textarea id="sql-input" rows="5" placeholder="SELECT * FROM events LIMIT 10"></textarea>
|
||||
<div class="query-actions">
|
||||
<button onclick="executeSqlQuery()" class="primary-button">Execute Query</button>
|
||||
<button onclick="clearSqlQuery()">Clear</button>
|
||||
<button onclick="clearQueryHistory()" class="danger-button">Clear History</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="query-results">
|
||||
<h3>Results</h3>
|
||||
<div id="query-info" class="info-box"></div>
|
||||
<div id="query-table" class="table-container"></div>
|
||||
</div>
|
||||
</section>
|
||||
```
|
||||
|
||||
#### JavaScript Functions (api/index.js)
|
||||
Add to [`api/index.js`](api/index.js:1):
|
||||
```javascript
|
||||
// Predefined query templates
|
||||
const SQL_QUERY_TEMPLATES = {
|
||||
recent_events: "SELECT id, pubkey, created_at, kind, substr(content, 1, 50) as content FROM events ORDER BY created_at DESC LIMIT 20",
|
||||
event_stats: "SELECT * FROM event_stats",
|
||||
subscriptions: "SELECT * FROM active_subscriptions_log ORDER BY created_at DESC",
|
||||
top_pubkeys: "SELECT * FROM top_pubkeys_view",
|
||||
event_kinds: "SELECT * FROM event_kinds_view ORDER BY count DESC",
|
||||
time_stats: "SELECT * FROM time_stats_view"
|
||||
};
|
||||
|
||||
// Query history management (localStorage)
|
||||
const QUERY_HISTORY_KEY = 'c_relay_sql_history';
|
||||
const MAX_HISTORY_ITEMS = 20;
|
||||
|
||||
// Load query history from localStorage
|
||||
function loadQueryHistory() {
|
||||
try {
|
||||
const history = localStorage.getItem(QUERY_HISTORY_KEY);
|
||||
return history ? JSON.parse(history) : [];
|
||||
} catch (e) {
|
||||
console.error('Failed to load query history:', e);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
// Save query to history
|
||||
function saveQueryToHistory(query) {
|
||||
if (!query || query.trim().length === 0) return;
|
||||
|
||||
try {
|
||||
let history = loadQueryHistory();
|
||||
|
||||
// Remove duplicate if exists
|
||||
history = history.filter(q => q !== query);
|
||||
|
||||
// Add to beginning
|
||||
history.unshift(query);
|
||||
|
||||
// Limit size
|
||||
if (history.length > MAX_HISTORY_ITEMS) {
|
||||
history = history.slice(0, MAX_HISTORY_ITEMS);
|
||||
}
|
||||
|
||||
localStorage.setItem(QUERY_HISTORY_KEY, JSON.stringify(history));
|
||||
updateQueryDropdown();
|
||||
} catch (e) {
|
||||
console.error('Failed to save query history:', e);
|
||||
}
|
||||
}
|
||||
|
||||
// Clear query history
|
||||
function clearQueryHistory() {
|
||||
if (confirm('Clear all query history?')) {
|
||||
localStorage.removeItem(QUERY_HISTORY_KEY);
|
||||
updateQueryDropdown();
|
||||
}
|
||||
}
|
||||
|
||||
// Update dropdown with history
|
||||
function updateQueryDropdown() {
|
||||
const historyGroup = document.getElementById('history-group');
|
||||
if (!historyGroup) return;
|
||||
|
||||
// Clear existing history options
|
||||
historyGroup.innerHTML = '';
|
||||
|
||||
const history = loadQueryHistory();
|
||||
if (history.length === 0) {
|
||||
const option = document.createElement('option');
|
||||
option.value = '';
|
||||
option.textContent = '(no history)';
|
||||
option.disabled = true;
|
||||
historyGroup.appendChild(option);
|
||||
return;
|
||||
}
|
||||
|
||||
history.forEach((query, index) => {
|
||||
const option = document.createElement('option');
|
||||
option.value = `history_${index}`;
|
||||
// Truncate long queries for display
|
||||
const displayQuery = query.length > 60 ? query.substring(0, 60) + '...' : query;
|
||||
option.textContent = displayQuery;
|
||||
option.dataset.query = query;
|
||||
historyGroup.appendChild(option);
|
||||
});
|
||||
}
|
||||
|
||||
// Load selected query from dropdown
|
||||
function loadSelectedQuery() {
|
||||
const dropdown = document.getElementById('query-dropdown');
|
||||
const selectedValue = dropdown.value;
|
||||
|
||||
if (!selectedValue) return;
|
||||
|
||||
let query = '';
|
||||
|
||||
// Check if it's a template
|
||||
if (SQL_QUERY_TEMPLATES[selectedValue]) {
|
||||
query = SQL_QUERY_TEMPLATES[selectedValue];
|
||||
}
|
||||
// Check if it's from history
|
||||
else if (selectedValue.startsWith('history_')) {
|
||||
const selectedOption = dropdown.options[dropdown.selectedIndex];
|
||||
query = selectedOption.dataset.query;
|
||||
}
|
||||
|
||||
if (query) {
|
||||
document.getElementById('sql-input').value = query;
|
||||
}
|
||||
|
||||
// Reset dropdown to placeholder
|
||||
dropdown.value = '';
|
||||
}
|
||||
|
||||
// Initialize query history on page load
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
updateQueryDropdown();
|
||||
});
|
||||
|
||||
// Reset the SQL console UI: blank out the query input box and remove
// any previously rendered query metadata and result table.
function clearSqlQuery() {
    const empty = '';
    document.getElementById('sql-input').value = empty;
    document.getElementById('query-info').innerHTML = empty;
    document.getElementById('query-table').innerHTML = empty;
}
|
||||
|
||||
// Track pending SQL queries by request ID
|
||||
const pendingSqlQueries = new Map();
|
||||
|
||||
// Execute SQL query via admin API
|
||||
async function executeSqlQuery() {
|
||||
const query = document.getElementById('sql-input').value;
|
||||
if (!query.trim()) {
|
||||
showError('Please enter a SQL query');
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Show loading state
|
||||
document.getElementById('query-info').innerHTML = '<div class="loading">Executing query...</div>';
|
||||
document.getElementById('query-table').innerHTML = '';
|
||||
|
||||
// Save to history (before execution, so it's saved even if query fails)
|
||||
saveQueryToHistory(query.trim());
|
||||
|
||||
// Send query as kind 23456 admin command
|
||||
const command = ["sql_query", query];
|
||||
const requestEvent = await sendAdminCommand(command);
|
||||
|
||||
// Store query info for when response arrives
|
||||
if (requestEvent && requestEvent.id) {
|
||||
pendingSqlQueries.set(requestEvent.id, {
|
||||
query: query,
|
||||
timestamp: Date.now()
|
||||
});
|
||||
}
|
||||
|
||||
// Note: Response will be handled by the event listener
|
||||
// which will call displaySqlQueryResults() when response arrives
|
||||
} catch (error) {
|
||||
showError('Failed to execute query: ' + error.message);
|
||||
}
|
||||
}
|
||||
|
||||
// Route an admin-API response back to the SQL console. Only responses
// whose request_id matches a query we issued (tracked in
// pendingSqlQueries) are handled; anything else is ignored.
function handleSqlQueryResponse(response) {
    const id = response.request_id;
    if (!id || !pendingSqlQueries.has(id)) {
        return; // not one of our pending SQL queries
    }
    // Stop tracking the request, then render its result set.
    pendingSqlQueries.delete(id);
    displaySqlQueryResults(response);
}
|
||||
|
||||
// Display SQL query results
|
||||
function displaySqlQueryResults(response) {
|
||||
const infoDiv = document.getElementById('query-info');
|
||||
const tableDiv = document.getElementById('query-table');
|
||||
|
||||
if (response.status === 'error' || response.error) {
|
||||
infoDiv.innerHTML = `<div class="error-message">❌ ${response.error || 'Query failed'}</div>`;
|
||||
tableDiv.innerHTML = '';
|
||||
return;
|
||||
}
|
||||
|
||||
// Show query info with request ID for debugging
|
||||
const rowCount = response.row_count || 0;
|
||||
const execTime = response.execution_time_ms || 0;
|
||||
const requestId = response.request_id ? response.request_id.substring(0, 8) + '...' : 'unknown';
|
||||
infoDiv.innerHTML = `
|
||||
<div class="query-info-success">
|
||||
<span>✅ Query executed successfully</span>
|
||||
<span>Rows: ${rowCount}</span>
|
||||
<span>Execution Time: ${execTime}ms</span>
|
||||
<span class="request-id" title="${response.request_id || ''}">Request: ${requestId}</span>
|
||||
</div>
|
||||
`;
|
||||
|
||||
// Build results table
|
||||
if (response.rows && response.rows.length > 0) {
|
||||
let html = '<table class="sql-results-table"><thead><tr>';
|
||||
response.columns.forEach(col => {
|
||||
html += `<th>${escapeHtml(col)}</th>`;
|
||||
});
|
||||
html += '</tr></thead><tbody>';
|
||||
|
||||
response.rows.forEach(row => {
|
||||
html += '<tr>';
|
||||
row.forEach(cell => {
|
||||
const cellValue = cell === null ? '<em>NULL</em>' : escapeHtml(String(cell));
|
||||
html += `<td>${cellValue}</td>`;
|
||||
});
|
||||
html += '</tr>';
|
||||
});
|
||||
|
||||
html += '</tbody></table>';
|
||||
tableDiv.innerHTML = html;
|
||||
} else {
|
||||
tableDiv.innerHTML = '<p class="no-results">No results returned</p>';
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to escape HTML special characters so untrusted text
// can be embedded safely in markup. Unlike the DOM-based trick
// (div.textContent -> innerHTML), this also escapes quote characters,
// making the output safe inside attribute values as well as element
// content, and it does not require a document object.
function escapeHtml(text) {
    return String(text)
        .replace(/&/g, '&amp;')   // must run first so later entities aren't double-escaped
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;')
        .replace(/"/g, '&quot;')
        .replace(/'/g, '&#39;');
}
|
||||
```
|
||||
|
||||
## Example Queries
|
||||
|
||||
### Subscription Statistics
|
||||
```sql
|
||||
SELECT
|
||||
date,
|
||||
subscriptions_created,
|
||||
subscriptions_ended,
|
||||
avg_duration_seconds,
|
||||
unique_clients
|
||||
FROM subscription_analytics
|
||||
ORDER BY date DESC
|
||||
LIMIT 7;
|
||||
```
|
||||
|
||||
### Event Distribution by Kind
|
||||
```sql
|
||||
SELECT kind, count, percentage
|
||||
FROM event_kinds_view
|
||||
ORDER BY count DESC;
|
||||
```
|
||||
|
||||
### Recent Events by Specific Pubkey
|
||||
```sql
|
||||
SELECT id, created_at, kind, content
|
||||
FROM events
|
||||
WHERE pubkey = 'abc123...'
|
||||
ORDER BY created_at DESC
|
||||
LIMIT 20;
|
||||
```
|
||||
|
||||
### Active Subscriptions with Details
|
||||
```sql
|
||||
SELECT
|
||||
subscription_id,
|
||||
client_ip,
|
||||
events_sent,
|
||||
duration_seconds,
|
||||
filter_json
|
||||
FROM active_subscriptions_log
|
||||
ORDER BY created_at DESC;
|
||||
```
|
||||
|
||||
### Database Size and Event Count
|
||||
```sql
|
||||
SELECT
|
||||
(SELECT COUNT(*) FROM events) as total_events,
|
||||
(SELECT COUNT(*) FROM subscription_events) as total_subscriptions,
|
||||
(SELECT COUNT(*) FROM auth_rules WHERE active = 1) as active_rules;
|
||||
```
|
||||
|
||||
## Configuration Options
|
||||
|
||||
Add to config table:
|
||||
```sql
|
||||
INSERT INTO config (key, value, data_type, description, category) VALUES
|
||||
('sql_query_enabled', 'true', 'boolean', 'Enable SQL query admin API', 'admin'),
|
||||
('sql_query_timeout', '5', 'integer', 'Query timeout in seconds', 'admin'),
|
||||
('sql_query_row_limit', '1000', 'integer', 'Maximum rows per query', 'admin'),
|
||||
('sql_query_size_limit', '1048576', 'integer', 'Maximum result size in bytes', 'admin'),
|
||||
('sql_query_log_enabled', 'true', 'boolean', 'Log all SQL queries', 'admin');
|
||||
```
|
||||
|
||||
## Security Considerations
|
||||
|
||||
### What This Protects Against
|
||||
1. **Unauthorized Access** - Only admin can execute queries (cryptographic verification)
|
||||
2. **Data Modification** - Read-only enforcement prevents accidental/malicious changes
|
||||
3. **Resource Exhaustion** - Timeouts and limits prevent DoS
|
||||
4. **Audit Trail** - All queries logged for security review
|
||||
|
||||
### What This Does NOT Protect Against
|
||||
1. **Admin Compromise** - If admin private key is stolen, attacker has full read access
|
||||
2. **Information Disclosure** - Admin can read all data (by design)
|
||||
3. **Complex Attacks** - Sophisticated SQL injection might bypass simple keyword blocking
|
||||
|
||||
### Recommendations
|
||||
1. **Secure Admin Key** - Store admin private key securely, never commit to git
|
||||
2. **Monitor Query Logs** - Review query logs regularly for suspicious activity
|
||||
3. **Backup Database** - Regular backups in case of issues
|
||||
4. **Test Queries** - Test complex queries on development relay first
|
||||
|
||||
## Testing Plan
|
||||
|
||||
### Unit Tests
|
||||
1. Query validation (blocked keywords, syntax)
|
||||
2. Result formatting (JSON structure)
|
||||
3. Error handling (timeouts, limits)
|
||||
|
||||
### Integration Tests
|
||||
1. Execute queries through NIP-17 DM
|
||||
2. Verify authentication (admin vs non-admin)
|
||||
3. Test resource limits (timeout, row limit)
|
||||
4. Test error responses
|
||||
|
||||
### Security Tests
|
||||
1. Attempt blocked statements (INSERT, DELETE, etc.)
|
||||
2. Attempt SQL injection patterns
|
||||
3. Test query timeout with slow queries
|
||||
4. Test row limit with large result sets
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
1. **Query History** - Store recent queries for quick re-execution
|
||||
2. **Query Favorites** - Save frequently used queries
|
||||
3. **Export Results** - Download results as CSV/JSON
|
||||
4. **Query Builder** - Visual query builder for common operations
|
||||
5. **Real-time Updates** - WebSocket updates for live data
|
||||
6. **Query Sharing** - Share queries with other admins (if multi-admin support added)
|
||||
|
||||
## Migration Path
|
||||
|
||||
### Phase 1: Backend Implementation
|
||||
1. Add query validation function
|
||||
2. Add query execution function
|
||||
3. Integrate with NIP-17 command handler
|
||||
4. Add configuration options
|
||||
5. Add query logging
|
||||
|
||||
### Phase 2: Frontend Implementation
|
||||
1. Add SQL query section to index.html
|
||||
2. Add query execution JavaScript
|
||||
3. Add predefined query templates
|
||||
4. Add results display formatting
|
||||
|
||||
### Phase 3: Testing and Documentation
|
||||
1. Write unit tests
|
||||
2. Write integration tests
|
||||
3. Update user documentation
|
||||
4. Create query examples guide
|
||||
|
||||
### Phase 4: Enhancement
|
||||
1. Add query history
|
||||
2. Add export functionality
|
||||
3. Optimize performance
|
||||
4. Add more predefined templates
|
||||
258
docs/sql_test_design.md
Normal file
@@ -0,0 +1,258 @@
|
||||
# SQL Query Test Script Design
|
||||
|
||||
## Overview
|
||||
|
||||
Test script for validating the SQL query admin API functionality. Tests query validation, execution, error handling, and security features.
|
||||
|
||||
## Script: tests/sql_test.sh
|
||||
|
||||
### Test Categories
|
||||
|
||||
#### 1. Query Validation Tests
|
||||
- ✅ Valid SELECT queries accepted
|
||||
- ❌ INSERT statements blocked
|
||||
- ❌ UPDATE statements blocked
|
||||
- ❌ DELETE statements blocked
|
||||
- ❌ DROP statements blocked
|
||||
- ❌ CREATE statements blocked
|
||||
- ❌ ALTER statements blocked
|
||||
- ❌ PRAGMA write operations blocked
|
||||
|
||||
#### 2. Query Execution Tests
|
||||
- ✅ Simple SELECT query
|
||||
- ✅ SELECT with WHERE clause
|
||||
- ✅ SELECT with JOIN
|
||||
- ✅ SELECT with ORDER BY and LIMIT
|
||||
- ✅ Query against views
|
||||
- ✅ Query with aggregate functions (COUNT, SUM, AVG)
|
||||
|
||||
#### 3. Response Format Tests
|
||||
- ✅ Response includes request_id
|
||||
- ✅ Response includes query_type
|
||||
- ✅ Response includes columns array
|
||||
- ✅ Response includes rows array
|
||||
- ✅ Response includes row_count
|
||||
- ✅ Response includes execution_time_ms
|
||||
|
||||
#### 4. Error Handling Tests
|
||||
- ❌ Invalid SQL syntax
|
||||
- ❌ Non-existent table
|
||||
- ❌ Non-existent column
|
||||
- ❌ Query timeout (if configurable)
|
||||
|
||||
#### 5. Security Tests
|
||||
- ❌ SQL injection attempts blocked
|
||||
- ❌ Nested query attacks blocked
|
||||
- ❌ Comment-based attacks blocked
|
||||
|
||||
#### 6. Concurrent Query Tests
|
||||
- ✅ Multiple queries in parallel
|
||||
- ✅ Responses correctly correlated to requests
|
||||
|
||||
## Script Structure
|
||||
|
||||
```bash
|
||||
#!/bin/bash
|
||||
|
||||
# SQL Query Admin API Test Script
|
||||
# Tests the sql_query command functionality
|
||||
|
||||
set -e
|
||||
|
||||
RELAY_URL="${RELAY_URL:-ws://localhost:8888}"
|
||||
ADMIN_PRIVKEY="${ADMIN_PRIVKEY:-}"
|
||||
RELAY_PUBKEY="${RELAY_PUBKEY:-}"
|
||||
|
||||
# Colors for output
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
# Test counters
|
||||
TESTS_RUN=0
|
||||
TESTS_PASSED=0
|
||||
TESTS_FAILED=0
|
||||
|
||||
# Helper functions
|
||||
#######################################
# Announce the start of a test case and bump the run counter.
# Globals:   YELLOW, NC (read), TESTS_RUN (incremented)
# Arguments: $1 - human-readable test name
# Outputs:   colorized "TEST: <name>" line on stdout
#######################################
print_test() {
    # %b expands the color escape sequences; %s keeps the test name
    # literal (echo -e would also interpret backslashes inside $1).
    printf '%bTEST: %s%b\n' "$YELLOW" "$1" "$NC"
    TESTS_RUN=$((TESTS_RUN + 1))
}
|
||||
|
||||
#######################################
# Report a passing test case and bump the pass counter.
# Globals:   GREEN, NC (read), TESTS_PASSED (incremented)
# Arguments: $1 - test description
# Outputs:   colorized "✓ PASS: <desc>" line on stdout
#######################################
print_pass() {
    # printf '%b' expands only the color escapes; the description is
    # printed literally (echo -e would mangle backslashes in $1).
    printf '%b✓ PASS: %s%b\n' "$GREEN" "$1" "$NC"
    TESTS_PASSED=$((TESTS_PASSED + 1))
}

#######################################
# Report a failing test case and bump the failure counter.
# Globals:   RED, NC (read), TESTS_FAILED (incremented)
# Arguments: $1 - test description
# Outputs:   colorized "✗ FAIL: <desc>" line on stdout
#######################################
print_fail() {
    printf '%b✗ FAIL: %s%b\n' "$RED" "$1" "$NC"
    TESTS_FAILED=$((TESTS_FAILED + 1))
}
|
||||
|
||||
# Send SQL query command
|
||||
send_sql_query() {
|
||||
local query="$1"
|
||||
# Implementation using nostr CLI tools or curl
|
||||
# Returns response JSON
|
||||
}
|
||||
|
||||
# Test functions
|
||||
test_valid_select() {
|
||||
print_test "Valid SELECT query"
|
||||
local response=$(send_sql_query "SELECT * FROM events LIMIT 1")
|
||||
if echo "$response" | grep -q '"query_type":"sql_query"'; then
|
||||
print_pass "Valid SELECT accepted"
|
||||
else
|
||||
print_fail "Valid SELECT rejected"
|
||||
fi
|
||||
}
|
||||
|
||||
test_blocked_insert() {
|
||||
print_test "INSERT statement blocked"
|
||||
local response=$(send_sql_query "INSERT INTO events VALUES (...)")
|
||||
if echo "$response" | grep -q '"error"'; then
|
||||
print_pass "INSERT correctly blocked"
|
||||
else
|
||||
print_fail "INSERT not blocked"
|
||||
fi
|
||||
}
|
||||
|
||||
# ... more test functions ...
|
||||
|
||||
# Main test execution
|
||||
main() {
|
||||
echo "================================"
|
||||
echo "SQL Query Admin API Tests"
|
||||
echo "================================"
|
||||
echo ""
|
||||
|
||||
# Check prerequisites
|
||||
if [ -z "$ADMIN_PRIVKEY" ]; then
|
||||
echo "Error: ADMIN_PRIVKEY not set"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Run test suites
|
||||
echo "1. Query Validation Tests"
|
||||
test_valid_select
|
||||
test_blocked_insert
|
||||
test_blocked_update
|
||||
test_blocked_delete
|
||||
test_blocked_drop
|
||||
|
||||
echo ""
|
||||
echo "2. Query Execution Tests"
|
||||
test_simple_select
|
||||
test_select_with_where
|
||||
test_select_with_join
|
||||
test_select_views
|
||||
|
||||
echo ""
|
||||
echo "3. Response Format Tests"
|
||||
test_response_format
|
||||
test_request_id_correlation
|
||||
|
||||
echo ""
|
||||
echo "4. Error Handling Tests"
|
||||
test_invalid_syntax
|
||||
test_nonexistent_table
|
||||
|
||||
echo ""
|
||||
echo "5. Security Tests"
|
||||
test_sql_injection
|
||||
|
||||
echo ""
|
||||
echo "6. Concurrent Query Tests"
|
||||
test_concurrent_queries
|
||||
|
||||
# Print summary
|
||||
echo ""
|
||||
echo "================================"
|
||||
echo "Test Summary"
|
||||
echo "================================"
|
||||
echo "Tests Run: $TESTS_RUN"
|
||||
echo "Tests Passed: $TESTS_PASSED"
|
||||
echo "Tests Failed: $TESTS_FAILED"
|
||||
|
||||
if [ $TESTS_FAILED -eq 0 ]; then
|
||||
echo -e "${GREEN}All tests passed!${NC}"
|
||||
exit 0
|
||||
else
|
||||
echo -e "${RED}Some tests failed${NC}"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
main "$@"
|
||||
```
|
||||
|
||||
## Test Data Setup
|
||||
|
||||
The script should work with the existing relay database without requiring special test data, using:
|
||||
- Existing events table
|
||||
- Existing views (event_stats, recent_events, etc.)
|
||||
- Existing config table
|
||||
|
||||
## Usage
|
||||
|
||||
```bash
|
||||
# Set environment variables
|
||||
export ADMIN_PRIVKEY="your_admin_private_key_hex"
|
||||
export RELAY_PUBKEY="relay_public_key_hex"
|
||||
export RELAY_URL="ws://localhost:8888"
|
||||
|
||||
# Run tests
|
||||
./tests/sql_test.sh
|
||||
|
||||
# Run specific test category
|
||||
./tests/sql_test.sh validation
|
||||
./tests/sql_test.sh security
|
||||
```
|
||||
|
||||
## Integration with CI/CD
|
||||
|
||||
The script should:
|
||||
- Return exit code 0 on success, 1 on failure
|
||||
- Output TAP (Test Anything Protocol) format for CI integration
|
||||
- Be runnable in automated test pipelines
|
||||
- Not require manual intervention
|
||||
|
||||
## Dependencies
|
||||
|
||||
- `bash` (version 4+)
|
||||
- `curl` or `websocat` for WebSocket communication
|
||||
- `jq` for JSON parsing
|
||||
- Nostr CLI tools (optional, for event signing)
|
||||
- Running c-relay instance
|
||||
|
||||
## Example Output
|
||||
|
||||
```
|
||||
================================
|
||||
SQL Query Admin API Tests
|
||||
================================
|
||||
|
||||
1. Query Validation Tests
|
||||
TEST: Valid SELECT query
|
||||
✓ PASS: Valid SELECT accepted
|
||||
TEST: INSERT statement blocked
|
||||
✓ PASS: INSERT correctly blocked
|
||||
TEST: UPDATE statement blocked
|
||||
✓ PASS: UPDATE correctly blocked
|
||||
|
||||
2. Query Execution Tests
|
||||
TEST: Simple SELECT query
|
||||
✓ PASS: Query executed successfully
|
||||
TEST: SELECT with WHERE clause
|
||||
✓ PASS: WHERE clause works correctly
|
||||
|
||||
...
|
||||
|
||||
================================
|
||||
Test Summary
|
||||
================================
|
||||
Tests Run: 24
|
||||
Tests Passed: 24
|
||||
Tests Failed: 0
|
||||
All tests passed!
|
||||
@@ -1,128 +0,0 @@
|
||||
# Startup Configuration Design Analysis
|
||||
|
||||
## Review of startup_config_design.md
|
||||
|
||||
### Key Design Principles Identified
|
||||
|
||||
1. **Zero Command Line Arguments**: Complete elimination of CLI arguments for true "quick start"
|
||||
2. **Event-Based Configuration**: Configuration stored as Nostr event (kind 33334) in events table
|
||||
3. **Self-Contained Database**: Database named after relay pubkey (`<pubkey>.nrdb`)
|
||||
4. **First-Time Setup**: Automatic key generation and initial configuration creation
|
||||
5. **Configuration Consistency**: Always read from event, never from hardcoded defaults
|
||||
|
||||
### Implementation Gaps and Specifications Needed
|
||||
|
||||
#### 1. Key Generation Process
|
||||
**Specification:**
|
||||
```
|
||||
First Startup Key Generation:
|
||||
1. Generate all keys on first startup (admin private/public, relay private/public)
|
||||
2. Use nostr_core_lib for key generation entropy
|
||||
3. Keys are encoded in hex format
|
||||
4. Print the admin private key to stdout for the user to save (it is never stored)
|
||||
5. Store admin public key, relay private key, and relay public key in configuration event
|
||||
6. Admin can later change the 33334 event to alter stored keys
|
||||
```
|
||||
|
||||
#### 2. Database Naming and Location
|
||||
**Specification:**
|
||||
```
|
||||
Database Naming:
|
||||
1. Database is named using relay pubkey: ./<relay_pubkey>.nrdb
|
||||
2. Database path structure: ./<relay_pubkey>.nrdb
|
||||
3. If database creation fails, the program exits (it cannot run without a database)
|
||||
4. c_nostr_relay.db should never exist in new system
|
||||
```
|
||||
|
||||
#### 3. Configuration Event Structure (Kind 33334)
|
||||
**Specification:**
|
||||
```
|
||||
Event Structure:
|
||||
- Kind: 33334 (parameterized replaceable event)
|
||||
- Event validation: Use nostr_core_lib to validate event
|
||||
- Event content field: "C Nostr Relay Configuration" (descriptive text)
|
||||
- Configuration update mechanism: TBD
|
||||
- Complete tag structure provided in configuration section below
|
||||
```
|
||||
|
||||
|
||||
|
||||
#### 4. Configuration Change Monitoring
|
||||
**Configuration Monitoring System:**
|
||||
```
|
||||
Every event that is received is checked to see if it is a kind 33334 event from the admin pubkey.
|
||||
If so, it is processed as a configuration update.
|
||||
```
|
||||
|
||||
#### 5. Error Handling and Recovery
|
||||
**Specification:**
|
||||
```
|
||||
Error Recovery Priority:
|
||||
1. Try to load latest valid config event
|
||||
2. Generate new default configuration event if none exists
|
||||
3. Exit with error if all recovery attempts fail
|
||||
|
||||
Note: There is only ever one configuration event (parameterized replaceable event),
|
||||
so no fallback to previous versions.
|
||||
```
|
||||
|
||||
### Design Clarifications
|
||||
|
||||
**Key Management:**
|
||||
- Admin private key is never stored, only printed once at first startup
|
||||
- Single admin system (no multi-admin support)
|
||||
- No key rotation support
|
||||
|
||||
**Configuration Management:**
|
||||
- No configuration versioning/timestamping
|
||||
- No automatic backup of configuration events
|
||||
- Configuration events are not broadcastable to other relays
|
||||
- Future: Auth system to restrict admin access to configuration events
|
||||
|
||||
---
|
||||
|
||||
## Complete Current Configuration Structure
|
||||
|
||||
Based on analysis of [`src/config.c`](src/config.c:753-795), here is the complete current configuration structure that will be converted to event tags:
|
||||
|
||||
### Complete Event Structure Example
|
||||
```json
|
||||
{
|
||||
"kind": 33334,
|
||||
"created_at": 1725661483,
|
||||
"tags": [
|
||||
["d", "<relay_pubkey>"],
|
||||
["auth_enabled", "false"],
|
||||
["relay_port", "8888"],
|
||||
["max_connections", "100"],
|
||||
|
||||
["relay_description", "High-performance C Nostr relay with SQLite storage"],
|
||||
["relay_contact", ""],
|
||||
["relay_pubkey", "<relay_public_key>"],
|
||||
["relay_privkey", "<relay_private_key>"],
|
||||
["relay_software", "https://git.laantungir.net/laantungir/c-relay.git"],
|
||||
["relay_version", "v1.0.0"],
|
||||
|
||||
["pow_min_difficulty", "0"],
|
||||
["pow_mode", "basic"],
|
||||
["nip40_expiration_enabled", "true"],
|
||||
["nip40_expiration_strict", "true"],
|
||||
["nip40_expiration_filter", "true"],
|
||||
["nip40_expiration_grace_period", "300"],
|
||||
["max_subscriptions_per_client", "25"],
|
||||
["max_total_subscriptions", "5000"],
|
||||
["max_filters_per_subscription", "10"],
|
||||
["max_event_tags", "100"],
|
||||
["max_content_length", "8196"],
|
||||
["max_message_length", "16384"],
|
||||
["default_limit", "500"],
|
||||
["max_limit", "5000"]
|
||||
],
|
||||
"content": "C Nostr Relay Configuration",
|
||||
"pubkey": "<admin_public_key>",
|
||||
"id": "<computed_event_id>",
|
||||
"sig": "<event_signature>"
|
||||
}
|
||||
```
|
||||
|
||||
**Note:** The `admin_pubkey` tag is omitted as it's redundant with the event's `pubkey` field.
|
||||
532
docs/subscription_cleanup_simplified.md
Normal file
@@ -0,0 +1,532 @@
|
||||
# Subscription Cleanup - Simplified Design
|
||||
|
||||
## Problem Summary
|
||||
|
||||
The c-relay Nostr relay experienced severe performance degradation (90-100% CPU) due to subscription accumulation in the database. Investigation revealed **323,644 orphaned subscriptions** that were never properly closed when WebSocket connections dropped.
|
||||
|
||||
## Solution: Two-Component Approach
|
||||
|
||||
This simplified design focuses on two pragmatic solutions that align with Nostr's stateless design:
|
||||
|
||||
1. **Startup Cleanup**: Close all subscriptions on relay restart
|
||||
2. **Connection Age Limit**: Disconnect clients after a configurable time period
|
||||
|
||||
Both solutions force clients to reconnect and re-establish subscriptions, which is standard Nostr behavior.
|
||||
|
||||
---
|
||||
|
||||
## Component 1: Startup Cleanup
|
||||
|
||||
### Purpose
|
||||
Ensure clean state on every relay restart by closing all subscriptions in the database.
|
||||
|
||||
### Implementation
|
||||
|
||||
**File:** [`src/subscriptions.c`](src/subscriptions.c)
|
||||
|
||||
**New Function:**
|
||||
```c
|
||||
void cleanup_all_subscriptions_on_startup(void) {
|
||||
if (!g_db) {
|
||||
DEBUG_ERROR("Database not initialized for startup cleanup");
|
||||
return;
|
||||
}
|
||||
|
||||
DEBUG_LOG("Startup cleanup: Marking all active subscriptions as disconnected");
|
||||
|
||||
// Mark all 'created' subscriptions as disconnected
|
||||
const char* update_sql =
|
||||
"UPDATE subscriptions "
|
||||
"SET ended_at = strftime('%s', 'now') "
|
||||
"WHERE event_type = 'created' AND ended_at IS NULL";
|
||||
|
||||
sqlite3_stmt* stmt;
|
||||
int rc = sqlite3_prepare_v2(g_db, update_sql, -1, &stmt, NULL);
|
||||
if (rc != SQLITE_OK) {
|
||||
DEBUG_ERROR("Failed to prepare startup cleanup query: %s", sqlite3_errmsg(g_db));
|
||||
return;
|
||||
}
|
||||
|
||||
rc = sqlite3_step(stmt);
|
||||
int updated_count = sqlite3_changes(g_db);
|
||||
sqlite3_finalize(stmt);
|
||||
|
||||
if (updated_count > 0) {
|
||||
// Log a single 'disconnected' event for the startup cleanup
|
||||
const char* insert_sql =
|
||||
"INSERT INTO subscriptions (subscription_id, wsi_pointer, client_ip, event_type) "
|
||||
"VALUES ('startup_cleanup', '', 'system', 'disconnected')";
|
||||
|
||||
rc = sqlite3_prepare_v2(g_db, insert_sql, -1, &stmt, NULL);
|
||||
if (rc == SQLITE_OK) {
|
||||
sqlite3_step(stmt);
|
||||
sqlite3_finalize(stmt);
|
||||
}
|
||||
|
||||
DEBUG_LOG("Startup cleanup: Marked %d subscriptions as disconnected", updated_count);
|
||||
} else {
|
||||
DEBUG_LOG("Startup cleanup: No active subscriptions found");
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Integration Point:** [`src/main.c:1810`](src/main.c:1810)
|
||||
|
||||
```c
|
||||
// Initialize subscription manager mutexes
|
||||
if (pthread_mutex_init(&g_subscription_manager.subscriptions_lock, NULL) != 0) {
|
||||
DEBUG_ERROR("Failed to initialize subscriptions mutex");
|
||||
sqlite3_close(g_db);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (pthread_mutex_init(&g_subscription_manager.ip_tracking_lock, NULL) != 0) {
|
||||
DEBUG_ERROR("Failed to initialize IP tracking mutex");
|
||||
pthread_mutex_destroy(&g_subscription_manager.subscriptions_lock);
|
||||
sqlite3_close(g_db);
|
||||
return 1;
|
||||
}
|
||||
|
||||
// **NEW: Startup cleanup - close all subscriptions**
|
||||
cleanup_all_subscriptions_on_startup();
|
||||
|
||||
// Start WebSocket relay server
|
||||
DEBUG_LOG("Starting WebSocket relay server...");
|
||||
if (start_websocket_relay(port_override, strict_port) != 0) {
|
||||
DEBUG_ERROR("Failed to start WebSocket relay");
|
||||
// ... cleanup code
|
||||
}
|
||||
```
|
||||
|
||||
### Benefits
|
||||
- **Immediate relief**: Restart relay to fix subscription issues
|
||||
- **Clean slate**: Every restart starts with zero active subscriptions
|
||||
- **Simple**: Single SQL UPDATE statement
|
||||
- **Nostr-aligned**: Clients are designed to reconnect after relay restart
|
||||
- **No configuration needed**: Always runs on startup
|
||||
|
||||
---
|
||||
|
||||
## Component 2: Connection Age Limit
|
||||
|
||||
### Purpose
|
||||
Automatically disconnect clients after a configurable time period, forcing them to reconnect and re-establish subscriptions.
|
||||
|
||||
### Why Disconnect Instead of Just Closing Subscriptions?
|
||||
|
||||
**Option 1: Send CLOSED message (keep connection)**
|
||||
- ❌ Not all clients handle `CLOSED` messages properly
|
||||
- ❌ Silent failure - client thinks it's subscribed but isn't
|
||||
- ❌ Partial cleanup - connection still consumes resources
|
||||
- ❌ More complex to implement
|
||||
|
||||
**Option 2: Disconnect client entirely (force reconnection)** ✅
|
||||
- ✅ Universal compatibility - all clients handle WebSocket reconnection
|
||||
- ✅ Complete resource cleanup (memory, file descriptors, etc.)
|
||||
- ✅ Simple implementation - single operation
|
||||
- ✅ Well-tested code path (same as network interruptions)
|
||||
- ✅ Forces re-authentication if needed
|
||||
|
||||
### Implementation
|
||||
|
||||
**File:** [`src/websockets.c`](src/websockets.c)
|
||||
|
||||
**New Function:**
|
||||
```c
|
||||
/**
|
||||
* Check connection age and disconnect clients that have been connected too long.
|
||||
* This forces clients to reconnect and re-establish subscriptions.
|
||||
*
|
||||
* Uses libwebsockets' lws_vhost_foreach_wsi() to iterate through all active
|
||||
* connections and checks their connection_established timestamp from per_session_data.
|
||||
*/
|
||||
void check_connection_age(int max_connection_seconds) {
|
||||
if (max_connection_seconds <= 0 || !ws_context) {
|
||||
return;
|
||||
}
|
||||
|
||||
time_t now = time(NULL);
|
||||
time_t cutoff = now - max_connection_seconds;
|
||||
|
||||
// Get the default vhost
|
||||
struct lws_vhost *vhost = lws_get_vhost_by_name(ws_context, "default");
|
||||
if (!vhost) {
|
||||
DEBUG_ERROR("Failed to get vhost for connection age check");
|
||||
return;
|
||||
}
|
||||
|
||||
// Iterate through all active WebSocket connections
|
||||
// Note: lws_vhost_foreach_wsi() calls our callback for each connection
|
||||
struct lws *wsi = NULL;
|
||||
while ((wsi = lws_vhost_foreach_wsi(vhost, wsi)) != NULL) {
|
||||
// Get per-session data which contains connection_established timestamp
|
||||
struct per_session_data *pss = (struct per_session_data *)lws_wsi_user(wsi);
|
||||
|
||||
if (pss && pss->connection_established > 0) {
|
||||
// Check if connection is older than cutoff
|
||||
if (pss->connection_established < cutoff) {
|
||||
// Connection is too old - close it
|
||||
long age_seconds = now - pss->connection_established;
|
||||
|
||||
DEBUG_LOG("Closing connection from %s (age: %lds, limit: %ds)",
|
||||
pss->client_ip, age_seconds, max_connection_seconds);
|
||||
|
||||
// Close with normal status and reason message
|
||||
lws_close_reason(wsi, LWS_CLOSE_STATUS_NORMAL,
|
||||
(unsigned char *)"connection age limit", 21);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Key Implementation Details:**
|
||||
|
||||
1. **No database needed**: Active connections are tracked by libwebsockets itself
|
||||
2. **Uses existing timestamp**: `pss->connection_established` is already set on line 456 of websockets.c
|
||||
3. **Built-in iterator**: `lws_vhost_foreach_wsi()` is used to iterate over active connections (NOTE: confirm this helper is exposed as a public API by the libwebsockets version in use — it is not available in all releases; an alternative is tracking wsi pointers at ESTABLISHED/CLOSED callbacks)
|
||||
4. **Per-session data**: Each connection's `per_session_data` is accessible via `lws_wsi_user()`
|
||||
5. **Safe closure**: `lws_close_reason()` properly closes the WebSocket with a status code and message
|
||||
|
||||
**Integration Point:** [`src/websockets.c:2176`](src/websockets.c:2176) - in existing event loop
|
||||
|
||||
```c
|
||||
// Main event loop with proper signal handling
|
||||
while (g_server_running && !g_shutdown_flag) {
|
||||
int result = lws_service(ws_context, 1000);
|
||||
|
||||
if (result < 0) {
|
||||
DEBUG_ERROR("libwebsockets service error");
|
||||
break;
|
||||
}
|
||||
|
||||
// Check if it's time to post status update
|
||||
time_t current_time = time(NULL);
|
||||
int status_post_hours = get_config_int("kind_1_status_posts_hours", 0);
|
||||
|
||||
if (status_post_hours > 0) {
|
||||
int seconds_interval = status_post_hours * 3600;
|
||||
if (current_time - last_status_post_time >= seconds_interval) {
|
||||
last_status_post_time = current_time;
|
||||
generate_and_post_status_event();
|
||||
}
|
||||
}
|
||||
|
||||
// **NEW: Check for connection age limit**
|
||||
int max_connection_seconds = get_config_int("max_connection_seconds", 86400); // Default 24 hours
|
||||
if (max_connection_seconds > 0) {
|
||||
check_connection_age(max_connection_seconds);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Configuration
|
||||
|
||||
**Parameter:** `max_connection_seconds`
|
||||
- **Default:** `86400` (24 hours)
|
||||
- **Range:** `0` = disabled, `>0` = disconnect after X seconds
|
||||
- **Units:** Seconds (for consistency with other time-based configs)
|
||||
|
||||
**Example configurations:**
|
||||
```json
|
||||
{
|
||||
"max_connection_seconds": 86400 // 86400 seconds = 24 hours (default)
|
||||
}
|
||||
```
|
||||
|
||||
```json
|
||||
{
|
||||
"max_connection_seconds": 43200 // 43200 seconds = 12 hours
|
||||
}
|
||||
```
|
||||
|
||||
```json
|
||||
{
|
||||
"max_connection_seconds": 3600 // 3600 seconds = 1 hour
|
||||
}
|
||||
```
|
||||
|
||||
```json
|
||||
{
|
||||
"max_connection_seconds": 0 // Disabled
|
||||
}
|
||||
```
|
||||
|
||||
### Client Behavior
|
||||
|
||||
When disconnected due to age limit, clients will:
|
||||
1. Detect WebSocket closure
|
||||
2. Wait briefly (exponential backoff)
|
||||
3. Reconnect to relay
|
||||
4. Re-authenticate if needed (NIP-42)
|
||||
5. Re-establish all subscriptions
|
||||
6. Resume normal operation
|
||||
|
||||
This is **exactly what happens** during network interruptions, so it's a well-tested code path in all Nostr clients.
|
||||
|
||||
### Benefits
|
||||
- **No new threads**: Uses existing event loop
|
||||
- **Minimal overhead**: Check runs once per second (same as `lws_service`)
|
||||
- **Simple implementation**: Iterate through active connections
|
||||
- **Consistent pattern**: Matches existing status post checking
|
||||
- **Universal compatibility**: All clients handle reconnection
|
||||
- **Complete cleanup**: Frees all resources associated with connection
|
||||
- **Configurable**: Can be adjusted per relay needs or disabled entirely
|
||||
|
||||
---
|
||||
|
||||
## Implementation Plan
|
||||
|
||||
### Phase 1: Startup Cleanup (1-2 hours)
|
||||
|
||||
1. **Add `cleanup_all_subscriptions_on_startup()` function**
|
||||
- File: [`src/subscriptions.c`](src/subscriptions.c)
|
||||
- SQL UPDATE to mark all active subscriptions as disconnected
|
||||
- Add logging for cleanup count
|
||||
|
||||
2. **Integrate in main()**
|
||||
- File: [`src/main.c:1810`](src/main.c:1810)
|
||||
- Call after mutex initialization, before WebSocket server start
|
||||
|
||||
3. **Test**
|
||||
- Create subscriptions in database
|
||||
- Restart relay
|
||||
- Verify all subscriptions marked as disconnected
|
||||
- Verify `active_subscriptions_log` shows 0 subscriptions
|
||||
|
||||
**Estimated Time:** 1-2 hours
|
||||
|
||||
### Phase 2: Connection Age Limit (2-3 hours)
|
||||
|
||||
1. **Add `check_connection_age()` function**
|
||||
- File: [`src/websockets.c`](src/websockets.c)
|
||||
- Iterate through active connections
|
||||
- Close connections older than limit
|
||||
|
||||
2. **Integrate in event loop**
|
||||
- File: [`src/websockets.c:2176`](src/websockets.c:2176)
|
||||
- Add check after status post check
|
||||
- Use same pattern as status posts
|
||||
|
||||
3. **Add configuration parameter**
|
||||
- Add `max_connection_seconds` to default config
|
||||
- Default: 86400 (24 hours)
|
||||
|
||||
4. **Test**
|
||||
- Connect client
|
||||
- Wait for timeout (or reduce timeout for testing)
|
||||
- Verify client disconnected
|
||||
- Verify client reconnects automatically
|
||||
- Verify subscriptions re-established
|
||||
|
||||
**Estimated Time:** 2-3 hours
|
||||
|
||||
---
|
||||
|
||||
## Testing Strategy
|
||||
|
||||
### Startup Cleanup Tests
|
||||
|
||||
```bash
|
||||
# Test 1: Clean startup with existing subscriptions
|
||||
- Create 100 active subscriptions in database
|
||||
- Restart relay
|
||||
- Verify all subscriptions marked as disconnected
|
||||
- Verify active_subscriptions_log shows 0 subscriptions
|
||||
|
||||
# Test 2: Clean startup with no subscriptions
|
||||
- Start relay with empty database
|
||||
- Verify no errors
|
||||
- Verify startup cleanup logs "No active subscriptions found"
|
||||
|
||||
# Test 3: Clients reconnect after restart
|
||||
- Create subscriptions before restart
|
||||
- Restart relay
|
||||
- Connect clients and create new subscriptions
|
||||
- Verify new subscriptions tracked correctly
|
||||
```
|
||||
|
||||
### Connection Age Limit Tests
|
||||
|
||||
```bash
|
||||
# Test 1: Connection disconnected after timeout
|
||||
- Set max_connection_seconds to 60 (for testing)
|
||||
- Connect client
|
||||
- Wait 61 seconds
|
||||
- Verify client disconnected
|
||||
- Verify client reconnects automatically
|
||||
|
||||
# Test 2: Subscriptions re-established after reconnection
|
||||
- Connect client with subscriptions
|
||||
- Wait for timeout
|
||||
- Verify client reconnects
|
||||
- Verify subscriptions re-established
|
||||
- Verify events still delivered
|
||||
|
||||
# Test 3: Disabled when set to 0
|
||||
- Set max_connection_seconds to 0
|
||||
- Connect client
|
||||
- Wait extended period
|
||||
- Verify client NOT disconnected
|
||||
```
|
||||
|
||||
### Integration Tests
|
||||
|
||||
```bash
|
||||
# Test 1: Combined behavior
|
||||
- Start relay (startup cleanup runs)
|
||||
- Connect multiple clients
|
||||
- Create subscriptions
|
||||
- Wait for connection timeout
|
||||
- Verify clients reconnect
|
||||
- Restart relay
|
||||
- Verify clean state
|
||||
|
||||
# Test 2: Load test
|
||||
- Connect 100 clients
|
||||
- Each creates 5 subscriptions
|
||||
- Wait for connection timeout
|
||||
- Verify all clients reconnect
|
||||
- Verify all subscriptions re-established
|
||||
- Monitor CPU usage (should remain low)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Success Criteria
|
||||
|
||||
### Component 1: Startup Cleanup
|
||||
- ✅ Relay starts with zero active subscriptions
|
||||
- ✅ All previous subscriptions marked as disconnected on startup
|
||||
- ✅ Clients successfully reconnect and re-establish subscriptions
|
||||
- ✅ Relay restart can be used as emergency fix for subscription issues
|
||||
- ✅ No errors during startup cleanup process
|
||||
|
||||
### Component 2: Connection Age Limit
|
||||
- ✅ Clients disconnected after configured time period
|
||||
- ✅ Clients automatically reconnect
|
||||
- ✅ Subscriptions re-established after reconnection
|
||||
- ✅ No impact on relay performance
|
||||
- ✅ Configuration parameter works correctly (including disabled state)
|
||||
|
||||
### Overall Success
|
||||
- ✅ CPU usage remains low (<10%)
|
||||
- ✅ No orphaned subscriptions accumulate
|
||||
- ✅ Database size remains stable
|
||||
- ✅ No manual intervention required
|
||||
|
||||
---
|
||||
|
||||
## Configuration Reference
|
||||
|
||||
**New Configuration Parameters:**
|
||||
|
||||
```json
|
||||
{
|
||||
"max_connection_seconds": 86400
|
||||
}
|
||||
```
|
||||
|
||||
**Recommended Settings:**
|
||||
|
||||
- **Production:**
|
||||
- `max_connection_seconds: 86400` (24 hours)
|
||||
|
||||
- **Development:**
|
||||
- `max_connection_seconds: 3600` (1 hour for faster testing)
|
||||
|
||||
- **High-traffic:**
|
||||
- `max_connection_seconds: 43200` (12 hours)
|
||||
|
||||
- **Disabled:**
|
||||
- `max_connection_seconds: 0`
|
||||
|
||||
---
|
||||
|
||||
## Rollback Plan
|
||||
|
||||
If issues arise after deployment:
|
||||
|
||||
1. **Disable connection age limit:**
|
||||
- Set `max_connection_seconds: 0` in config
|
||||
- Restart relay
|
||||
- Monitor for stability
|
||||
|
||||
2. **Revert code changes:**
|
||||
- Remove `check_connection_age()` call from event loop
|
||||
- Remove `cleanup_all_subscriptions_on_startup()` call from main
|
||||
- Restart relay
|
||||
|
||||
3. **Database cleanup (if needed):**
|
||||
- Manually clean up orphaned subscriptions using SQL:
|
||||
```sql
|
||||
UPDATE subscriptions
|
||||
SET ended_at = strftime('%s', 'now')
|
||||
WHERE event_type = 'created' AND ended_at IS NULL;
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Comparison with Original Design
|
||||
|
||||
### Original Design (5-6 Components)
|
||||
1. Startup cleanup
|
||||
2. Fix WebSocket disconnection logging
|
||||
3. Enhance subscription removal with reason parameter
|
||||
4. Periodic cleanup task (background thread)
|
||||
5. Optimize database VIEW
|
||||
6. Subscription expiration (optional)
|
||||
|
||||
### Simplified Design (2 Components)
|
||||
1. Startup cleanup
|
||||
2. Connection age limit
|
||||
|
||||
### Why Simplified is Better
|
||||
|
||||
**Advantages:**
|
||||
- **Simpler**: 2 components vs 5-6 components
|
||||
- **Faster to implement**: 3-5 hours vs 11-17 hours
|
||||
- **Easier to maintain**: Less code, fewer moving parts
|
||||
- **More reliable**: Fewer potential failure points
|
||||
- **Nostr-aligned**: Leverages client reconnection behavior
|
||||
- **No new threads**: Uses existing event loop
|
||||
- **Universal compatibility**: All clients handle reconnection
|
||||
|
||||
**What We're Not Losing:**
|
||||
- Startup cleanup is identical in both designs
|
||||
- Connection age limit achieves the same goal as periodic cleanup + expiration
|
||||
- Disconnection forces complete cleanup (better than just logging)
|
||||
- Database VIEW optimization not needed if subscriptions don't accumulate
|
||||
|
||||
**Trade-offs:**
|
||||
- Less granular logging (but simpler)
|
||||
- No historical subscription analytics (but cleaner database)
|
||||
- Clients must reconnect periodically (but this is standard Nostr behavior)
|
||||
|
||||
---
|
||||
|
||||
## Conclusion
|
||||
|
||||
This simplified design solves the subscription accumulation problem with two pragmatic solutions:
|
||||
|
||||
1. **Startup cleanup** ensures every relay restart starts with a clean slate
|
||||
2. **Connection age limit** prevents long-term accumulation by forcing periodic reconnection
|
||||
|
||||
Both solutions align with Nostr's stateless design where clients are expected to handle reconnection. The implementation is simple, maintainable, and leverages existing code patterns.
|
||||
|
||||
**Key Benefits:**
|
||||
- ✅ Solves the root problem (subscription accumulation)
|
||||
- ✅ Simple to implement (3-5 hours total)
|
||||
- ✅ Easy to maintain (minimal code)
|
||||
- ✅ Universal compatibility (all clients handle reconnection)
|
||||
- ✅ No new threads or background tasks
|
||||
- ✅ Configurable and can be disabled if needed
|
||||
- ✅ Relay restart as emergency fix
|
||||
|
||||
**Next Steps:**
|
||||
1. Implement Component 1 (Startup Cleanup)
|
||||
2. Test thoroughly
|
||||
3. Implement Component 2 (Connection Age Limit)
|
||||
4. Test thoroughly
|
||||
5. Deploy to production
|
||||
6. Monitor CPU usage and subscription counts
|
||||
209
docs/subscription_matching_debug_plan.md
Normal file
@@ -0,0 +1,209 @@
|
||||
# Subscription Matching Debug Plan
|
||||
|
||||
## Problem
|
||||
The relay is not matching kind 1059 (NIP-17 gift wrap) events to subscriptions, even though a subscription exists with `kinds:[1059]` filter. The log shows:
|
||||
```
|
||||
Event broadcast complete: 0 subscriptions matched
|
||||
```
|
||||
|
||||
But we have this subscription:
|
||||
```
|
||||
sub:3 146.70.187.119 0x78edc9b43210 8m 27s kinds:[1059], since:10/23/2025, 4:27:59 PM, limit:50
|
||||
```
|
||||
|
||||
## Investigation Strategy
|
||||
|
||||
### 1. Add Debug Output to `event_matches_filter()` (lines 386-564)
|
||||
Add debug logging at each filter check to trace the matching logic:
|
||||
|
||||
- **Entry point**: Log the event kind and filter being tested
|
||||
- **Kinds filter check** (lines 392-415): Log whether kinds filter exists, the event kind value, and each filter kind being compared
|
||||
- **Authors filter check** (lines 417-442): Log if authors filter exists and matching results
|
||||
- **IDs filter check** (lines 444-469): Log if IDs filter exists and matching results
|
||||
- **Since filter check** (lines 471-482): Log the event timestamp vs filter since value
|
||||
- **Until filter check** (lines 484-495): Log the event timestamp vs filter until value
|
||||
- **Tag filters check** (lines 497-561): Log tag filter matching details
|
||||
- **Exit point**: Log whether the overall filter matched
|
||||
|
||||
### 2. Add Debug Output to `event_matches_subscription()` (lines 567-581)
|
||||
Add logging to show:
|
||||
- How many filters are in the subscription
|
||||
- Which filter (if any) matched
|
||||
- Overall subscription match result
|
||||
|
||||
### 3. Add Debug Output to `broadcast_event_to_subscriptions()` (lines 584-726)
|
||||
Add logging to show:
|
||||
- The event being broadcast (kind, id, created_at)
|
||||
- Total number of active subscriptions being checked
|
||||
- How many subscriptions matched after the first pass
|
||||
|
||||
### 4. Key Areas to Focus On
|
||||
|
||||
Based on the code analysis, the most likely issues are:
|
||||
|
||||
1. **Kind matching logic** (lines 392-415): The event kind might not be extracted correctly, or the comparison might be failing
|
||||
2. **Since timestamp** (lines 471-482): The subscription has a `since` filter - if the event timestamp is before this, it won't match
|
||||
3. **Event structure**: The event JSON might not have the expected structure
|
||||
|
||||
### 5. Specific Debug Additions
|
||||
|
||||
#### In `event_matches_filter()` at line 386:
|
||||
```c
|
||||
// Add at start of function
|
||||
cJSON* event_kind_obj = cJSON_GetObjectItem(event, "kind");
|
||||
cJSON* event_id_obj = cJSON_GetObjectItem(event, "id");
|
||||
cJSON* event_created_at_obj = cJSON_GetObjectItem(event, "created_at");
|
||||
|
||||
DEBUG_TRACE("FILTER_MATCH: Testing event kind=%d id=%.8s created_at=%ld",
|
||||
event_kind_obj ? (int)cJSON_GetNumberValue(event_kind_obj) : -1,
|
||||
event_id_obj && cJSON_IsString(event_id_obj) ? cJSON_GetStringValue(event_id_obj) : "null",
|
||||
event_created_at_obj ? (long)cJSON_GetNumberValue(event_created_at_obj) : 0);
|
||||
```
|
||||
|
||||
#### In kinds filter check (after line 392):
|
||||
```c
|
||||
if (filter->kinds && cJSON_IsArray(filter->kinds)) {
|
||||
DEBUG_TRACE("FILTER_MATCH: Checking kinds filter with %d kinds", cJSON_GetArraySize(filter->kinds));
|
||||
|
||||
cJSON* event_kind = cJSON_GetObjectItem(event, "kind");
|
||||
if (!event_kind || !cJSON_IsNumber(event_kind)) {
|
||||
DEBUG_WARN("FILTER_MATCH: Event has no valid kind field");
|
||||
return 0;
|
||||
}
|
||||
|
||||
int event_kind_val = (int)cJSON_GetNumberValue(event_kind);
|
||||
DEBUG_TRACE("FILTER_MATCH: Event kind=%d", event_kind_val);
|
||||
|
||||
int kind_match = 0;
|
||||
cJSON* kind_item = NULL;
|
||||
cJSON_ArrayForEach(kind_item, filter->kinds) {
|
||||
if (cJSON_IsNumber(kind_item)) {
|
||||
int filter_kind = (int)cJSON_GetNumberValue(kind_item);
|
||||
DEBUG_TRACE("FILTER_MATCH: Comparing event kind %d with filter kind %d", event_kind_val, filter_kind);
|
||||
if (filter_kind == event_kind_val) {
|
||||
kind_match = 1;
|
||||
DEBUG_TRACE("FILTER_MATCH: Kind matched!");
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!kind_match) {
|
||||
DEBUG_TRACE("FILTER_MATCH: No kind match, filter rejected");
|
||||
return 0;
|
||||
}
|
||||
DEBUG_TRACE("FILTER_MATCH: Kinds filter passed");
|
||||
}
|
||||
```
|
||||
|
||||
#### In since filter check (after line 472):
|
||||
```c
|
||||
if (filter->since > 0) {
|
||||
cJSON* event_created_at = cJSON_GetObjectItem(event, "created_at");
|
||||
if (!event_created_at || !cJSON_IsNumber(event_created_at)) {
|
||||
DEBUG_WARN("FILTER_MATCH: Event has no valid created_at field");
|
||||
return 0;
|
||||
}
|
||||
|
||||
long event_timestamp = (long)cJSON_GetNumberValue(event_created_at);
|
||||
DEBUG_TRACE("FILTER_MATCH: Checking since filter: event_ts=%ld filter_since=%ld",
|
||||
event_timestamp, filter->since);
|
||||
|
||||
if (event_timestamp < filter->since) {
|
||||
DEBUG_TRACE("FILTER_MATCH: Event too old (before since), filter rejected");
|
||||
return 0;
|
||||
}
|
||||
DEBUG_TRACE("FILTER_MATCH: Since filter passed");
|
||||
}
|
||||
```
|
||||
|
||||
#### At end of `event_matches_filter()` (before line 563):
|
||||
```c
|
||||
DEBUG_TRACE("FILTER_MATCH: All filters passed, event matches!");
|
||||
return 1; // All filters passed
|
||||
```
|
||||
|
||||
#### In `event_matches_subscription()` at line 567:
|
||||
```c
|
||||
int event_matches_subscription(cJSON* event, subscription_t* subscription) {
|
||||
if (!event || !subscription || !subscription->filters) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
DEBUG_TRACE("SUB_MATCH: Testing subscription '%s'", subscription->id);
|
||||
|
||||
int filter_num = 0;
|
||||
subscription_filter_t* filter = subscription->filters;
|
||||
while (filter) {
|
||||
filter_num++;
|
||||
DEBUG_TRACE("SUB_MATCH: Testing filter #%d", filter_num);
|
||||
|
||||
if (event_matches_filter(event, filter)) {
|
||||
DEBUG_TRACE("SUB_MATCH: Filter #%d matched! Subscription '%s' matches",
|
||||
filter_num, subscription->id);
|
||||
return 1; // Match found (OR logic)
|
||||
}
|
||||
filter = filter->next;
|
||||
}
|
||||
|
||||
DEBUG_TRACE("SUB_MATCH: No filters matched for subscription '%s'", subscription->id);
|
||||
return 0; // No filters matched
|
||||
}
|
||||
```
|
||||
|
||||
#### In `broadcast_event_to_subscriptions()` at line 584:
|
||||
```c
|
||||
int broadcast_event_to_subscriptions(cJSON* event) {
|
||||
if (!event) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Log event details
|
||||
cJSON* event_kind = cJSON_GetObjectItem(event, "kind");
|
||||
cJSON* event_id = cJSON_GetObjectItem(event, "id");
|
||||
cJSON* event_created_at = cJSON_GetObjectItem(event, "created_at");
|
||||
|
||||
DEBUG_TRACE("BROADCAST: Event kind=%d id=%.8s created_at=%ld",
|
||||
event_kind ? (int)cJSON_GetNumberValue(event_kind) : -1,
|
||||
event_id && cJSON_IsString(event_id) ? cJSON_GetStringValue(event_id) : "null",
|
||||
event_created_at ? (long)cJSON_GetNumberValue(event_created_at) : 0);
|
||||
|
||||
// ... existing expiration check code ...
|
||||
|
||||
// After line 611 (before pthread_mutex_lock):
|
||||
pthread_mutex_lock(&g_subscription_manager.subscriptions_lock);
|
||||
|
||||
int total_subs = 0;
|
||||
subscription_t* count_sub = g_subscription_manager.active_subscriptions;
|
||||
while (count_sub) {
|
||||
total_subs++;
|
||||
count_sub = count_sub->next;
|
||||
}
|
||||
DEBUG_TRACE("BROADCAST: Checking %d active subscriptions", total_subs);
|
||||
|
||||
subscription_t* sub = g_subscription_manager.active_subscriptions;
|
||||
// ... rest of matching logic ...
|
||||
```
|
||||
|
||||
## Expected Outcome
|
||||
|
||||
With these debug additions, we should see output like:
|
||||
```
|
||||
BROADCAST: Event kind=1059 id=abc12345 created_at=1729712279
|
||||
BROADCAST: Checking 1 active subscriptions
|
||||
SUB_MATCH: Testing subscription 'sub:3'
|
||||
SUB_MATCH: Testing filter #1
|
||||
FILTER_MATCH: Testing event kind=1059 id=abc12345 created_at=1729712279
|
||||
FILTER_MATCH: Checking kinds filter with 1 kinds
|
||||
FILTER_MATCH: Event kind=1059
|
||||
FILTER_MATCH: Comparing event kind 1059 with filter kind 1059
|
||||
FILTER_MATCH: Kind matched!
|
||||
FILTER_MATCH: Kinds filter passed
|
||||
FILTER_MATCH: Checking since filter: event_ts=1729712279 filter_since=1729708079
|
||||
FILTER_MATCH: Since filter passed
|
||||
FILTER_MATCH: All filters passed, event matches!
|
||||
SUB_MATCH: Filter #1 matched! Subscription 'sub:3' matches
|
||||
Event broadcast complete: 1 subscriptions matched
|
||||
```
|
||||
|
||||
This will help us identify exactly where the matching is failing.
|
||||
358
docs/subscription_table_collapse_plan.md
Normal file
@@ -0,0 +1,358 @@
|
||||
# Subscription Table Collapsible Groups Implementation Plan
|
||||
|
||||
## Objective
|
||||
Add collapsible/expandable functionality to subscription groups, where:
|
||||
1. Each WSI Pointer group starts collapsed, showing only a summary row
|
||||
2. Clicking the summary row expands to show all subscription details for that WSI Pointer
|
||||
3. Clicking again collapses the group back to the summary row
|
||||
|
||||
## Current Behavior
|
||||
- All subscriptions are displayed in a flat list
|
||||
- WSI Pointer value shown only on first row of each group
|
||||
- No interaction or collapse functionality
|
||||
|
||||
## Desired Behavior
|
||||
- **Collapsed state**: One row per WSI Pointer showing summary information
|
||||
- **Expanded state**: Header row + detail rows for each subscription in that group
|
||||
- **Toggle**: Click on header row to expand/collapse
|
||||
- **Visual indicator**: Arrow or icon showing expand/collapse state
|
||||
|
||||
## Design Approach
|
||||
|
||||
### Option 1: Summary Row + Detail Rows (Recommended)
|
||||
```
|
||||
[▶] WSI Pointer: 0x12345678 | Subscriptions: 3 | Total Duration: 15m
|
||||
(detail rows hidden)
|
||||
|
||||
When clicked:
|
||||
[▼] WSI Pointer: 0x12345678 | Subscriptions: 3 | Total Duration: 15m
|
||||
| sub-001 | 5m 30s | kinds:[1]
|
||||
| sub-002 | 3m 15s | kinds:[3]
|
||||
| sub-003 | 6m 15s | kinds:[1,3]
|
||||
```
|
||||
|
||||
### Option 2: First Row as Header (Alternative)
|
||||
```
|
||||
[▶] 0x12345678 | sub-001 | 5m 30s | kinds:[1] (+ 2 more)
|
||||
|
||||
When clicked:
|
||||
[▼] 0x12345678 | sub-001 | 5m 30s | kinds:[1]
|
||||
| sub-002 | 3m 15s | kinds:[3]
|
||||
| sub-003 | 6m 15s | kinds:[1,3]
|
||||
```
|
||||
|
||||
**Recommendation**: Option 1 provides clearer UX and better summary information.
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Files to Modify
|
||||
1. `api/index.js` - Function `populateSubscriptionDetailsTable()` (lines 4277-4384)
|
||||
2. `api/index.css` - Add styles for collapsible rows
|
||||
|
||||
### Data Structure Changes
|
||||
|
||||
Need to group subscriptions by WSI Pointer first:
|
||||
```javascript
|
||||
// Group subscriptions by wsi_pointer
|
||||
const groupedSubscriptions = {};
|
||||
subscriptionsData.forEach(sub => {
|
||||
const wsiKey = sub.wsi_pointer || 'N/A';
|
||||
if (!groupedSubscriptions[wsiKey]) {
|
||||
groupedSubscriptions[wsiKey] = [];
|
||||
}
|
||||
groupedSubscriptions[wsiKey].push(sub);
|
||||
});
|
||||
```
|
||||
|
||||
### HTML Structure
|
||||
|
||||
#### Summary Row (Header)
|
||||
```html
|
||||
<tr class="subscription-group-header" data-wsi-pointer="0x12345678" data-expanded="false">
|
||||
<td colspan="4" style="cursor: pointer; user-select: none;">
|
||||
<span class="expand-icon">▶</span>
|
||||
<strong>WSI Pointer:</strong> 0x12345678
|
||||
<span class="group-summary">
|
||||
| Subscriptions: 3 | Oldest: 15m 30s
|
||||
</span>
|
||||
</td>
|
||||
</tr>
|
||||
```
|
||||
|
||||
#### Detail Rows (Initially Hidden)
|
||||
```html
|
||||
<tr class="subscription-detail-row" data-wsi-group="0x12345678" style="display: none;">
|
||||
<td style="padding-left: 30px;"><!-- Empty for WSI Pointer --></td>
|
||||
<td>sub-001</td>
|
||||
<td>5m 30s</td>
|
||||
<td>kinds:[1]</td>
|
||||
</tr>
|
||||
```
|
||||
|
||||
### JavaScript Implementation
|
||||
|
||||
#### Modified populateSubscriptionDetailsTable Function
|
||||
|
||||
```javascript
|
||||
function populateSubscriptionDetailsTable(subscriptionsData) {
|
||||
const tableBody = document.getElementById('subscription-details-table-body');
|
||||
if (!tableBody || !subscriptionsData || !Array.isArray(subscriptionsData)) return;
|
||||
|
||||
tableBody.innerHTML = '';
|
||||
|
||||
if (subscriptionsData.length === 0) {
|
||||
const row = document.createElement('tr');
|
||||
row.innerHTML = '<td colspan="4" style="text-align: center; font-style: italic;">No active subscriptions</td>';
|
||||
tableBody.appendChild(row);
|
||||
return;
|
||||
}
|
||||
|
||||
// Sort subscriptions by wsi_pointer to group them together
|
||||
subscriptionsData.sort((a, b) => {
|
||||
const wsiA = a.wsi_pointer || '';
|
||||
const wsiB = b.wsi_pointer || '';
|
||||
return wsiA.localeCompare(wsiB);
|
||||
});
|
||||
|
||||
// Group subscriptions by wsi_pointer
|
||||
const groupedSubscriptions = {};
|
||||
subscriptionsData.forEach(sub => {
|
||||
const wsiKey = sub.wsi_pointer || 'N/A';
|
||||
if (!groupedSubscriptions[wsiKey]) {
|
||||
groupedSubscriptions[wsiKey] = [];
|
||||
}
|
||||
groupedSubscriptions[wsiKey].push(sub);
|
||||
});
|
||||
|
||||
// Create rows for each group
|
||||
Object.entries(groupedSubscriptions).forEach(([wsiPointer, subscriptions]) => {
|
||||
// Calculate group summary
|
||||
const subCount = subscriptions.length;
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
const oldestDuration = Math.max(...subscriptions.map(s => now - s.created_at));
|
||||
const oldestDurationStr = formatDuration(oldestDuration);
|
||||
|
||||
// Create header row (summary)
|
||||
const headerRow = document.createElement('tr');
|
||||
headerRow.className = 'subscription-group-header';
|
||||
headerRow.setAttribute('data-wsi-pointer', wsiPointer);
|
||||
headerRow.setAttribute('data-expanded', 'false');
|
||||
headerRow.style.cursor = 'pointer';
|
||||
headerRow.style.userSelect = 'none';
|
||||
headerRow.style.backgroundColor = 'var(--hover-color, #f5f5f5)';
|
||||
|
||||
headerRow.innerHTML = `
|
||||
<td colspan="4" style="padding: 8px;">
|
||||
<span class="expand-icon" style="display: inline-block; width: 20px; transition: transform 0.2s;">▶</span>
|
||||
<strong style="font-family: 'Courier New', monospace; font-size: 12px;">WSI: ${wsiPointer}</strong>
|
||||
<span style="color: #666; margin-left: 15px;">
|
||||
Subscriptions: ${subCount} | Oldest: ${oldestDurationStr}
|
||||
</span>
|
||||
</td>
|
||||
`;
|
||||
|
||||
// Add click handler to toggle expansion
|
||||
headerRow.addEventListener('click', () => toggleSubscriptionGroup(wsiPointer));
|
||||
|
||||
tableBody.appendChild(headerRow);
|
||||
|
||||
// Create detail rows (initially hidden)
|
||||
subscriptions.forEach((subscription, index) => {
|
||||
const detailRow = document.createElement('tr');
|
||||
detailRow.className = 'subscription-detail-row';
|
||||
detailRow.setAttribute('data-wsi-group', wsiPointer);
|
||||
detailRow.style.display = 'none';
|
||||
|
||||
// Calculate duration
|
||||
const duration = now - subscription.created_at;
|
||||
const durationStr = formatDuration(duration);
|
||||
|
||||
// Format filters
|
||||
let filtersDisplay = 'None';
|
||||
if (subscription.filters && subscription.filters.length > 0) {
|
||||
const filterDetails = [];
|
||||
subscription.filters.forEach((filter) => {
|
||||
const parts = [];
|
||||
|
||||
if (filter.kinds && Array.isArray(filter.kinds) && filter.kinds.length > 0) {
|
||||
parts.push(`kinds:[${filter.kinds.join(',')}]`);
|
||||
}
|
||||
|
||||
if (filter.authors && Array.isArray(filter.authors) && filter.authors.length > 0) {
|
||||
const authorCount = filter.authors.length;
|
||||
if (authorCount === 1) {
|
||||
const shortPubkey = filter.authors[0].substring(0, 8) + '...';
|
||||
parts.push(`authors:[${shortPubkey}]`);
|
||||
} else {
|
||||
parts.push(`authors:[${authorCount} pubkeys]`);
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.ids && Array.isArray(filter.ids) && filter.ids.length > 0) {
|
||||
const idCount = filter.ids.length;
|
||||
parts.push(`ids:[${idCount} event${idCount > 1 ? 's' : ''}]`);
|
||||
}
|
||||
|
||||
const timeParts = [];
|
||||
if (filter.since && filter.since > 0) {
|
||||
const sinceDate = new Date(filter.since * 1000).toLocaleString();
|
||||
timeParts.push(`since:${sinceDate}`);
|
||||
}
|
||||
if (filter.until && filter.until > 0) {
|
||||
const untilDate = new Date(filter.until * 1000).toLocaleString();
|
||||
timeParts.push(`until:${untilDate}`);
|
||||
}
|
||||
if (timeParts.length > 0) {
|
||||
parts.push(timeParts.join(', '));
|
||||
}
|
||||
|
||||
if (filter.limit && filter.limit > 0) {
|
||||
parts.push(`limit:${filter.limit}`);
|
||||
}
|
||||
|
||||
if (filter.tag_filters && Array.isArray(filter.tag_filters) && filter.tag_filters.length > 0) {
|
||||
parts.push(`tags:[${filter.tag_filters.length} filter${filter.tag_filters.length > 1 ? 's' : ''}]`);
|
||||
}
|
||||
|
||||
if (parts.length > 0) {
|
||||
filterDetails.push(parts.join(', '));
|
||||
} else {
|
||||
filterDetails.push('empty filter');
|
||||
}
|
||||
});
|
||||
|
||||
filtersDisplay = filterDetails.join(' | ');
|
||||
}
|
||||
|
||||
detailRow.innerHTML = `
|
||||
<td style="padding-left: 30px; font-family: 'Courier New', monospace; font-size: 11px; color: #999;">└─</td>
|
||||
<td style="font-family: 'Courier New', monospace; font-size: 12px;">${subscription.id || 'N/A'}</td>
|
||||
<td>${durationStr}</td>
|
||||
<td>${filtersDisplay}</td>
|
||||
`;
|
||||
|
||||
tableBody.appendChild(detailRow);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// Toggle function for expanding/collapsing groups
|
||||
function toggleSubscriptionGroup(wsiPointer) {
|
||||
const headerRow = document.querySelector(`.subscription-group-header[data-wsi-pointer="${wsiPointer}"]`);
|
||||
const detailRows = document.querySelectorAll(`.subscription-detail-row[data-wsi-group="${wsiPointer}"]`);
|
||||
const expandIcon = headerRow.querySelector('.expand-icon');
|
||||
|
||||
const isExpanded = headerRow.getAttribute('data-expanded') === 'true';
|
||||
|
||||
if (isExpanded) {
|
||||
// Collapse
|
||||
detailRows.forEach(row => row.style.display = 'none');
|
||||
expandIcon.textContent = '▶';
|
||||
expandIcon.style.transform = 'rotate(0deg)';
|
||||
headerRow.setAttribute('data-expanded', 'false');
|
||||
} else {
|
||||
// Expand
|
||||
detailRows.forEach(row => row.style.display = 'table-row');
|
||||
expandIcon.textContent = '▼';
|
||||
        expandIcon.style.transform = 'rotate(0deg)'; // glyph already swapped to ▼; rotating it would point it the wrong way
|
||||
headerRow.setAttribute('data-expanded', 'true');
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### CSS Additions (api/index.css)
|
||||
|
||||
```css
|
||||
/* Subscription group header styles */
|
||||
.subscription-group-header {
|
||||
background-color: var(--hover-color, #f5f5f5);
|
||||
font-weight: 500;
|
||||
transition: background-color 0.2s;
|
||||
}
|
||||
|
||||
.subscription-group-header:hover {
|
||||
background-color: var(--primary-color-light, #e8e8e8);
|
||||
}
|
||||
|
||||
.expand-icon {
|
||||
display: inline-block;
|
||||
width: 20px;
|
||||
transition: transform 0.2s ease;
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
/* Detail row styles */
|
||||
.subscription-detail-row {
|
||||
background-color: var(--background-color, #ffffff);
|
||||
}
|
||||
|
||||
.subscription-detail-row:hover {
|
||||
background-color: var(--hover-color-light, #fafafa);
|
||||
}
|
||||
|
||||
/* Dark mode support */
|
||||
.dark-mode .subscription-group-header {
|
||||
background-color: var(--hover-color-dark, #2a2a2a);
|
||||
}
|
||||
|
||||
.dark-mode .subscription-group-header:hover {
|
||||
background-color: var(--primary-color-dark, #333333);
|
||||
}
|
||||
|
||||
.dark-mode .subscription-detail-row {
|
||||
background-color: var(--background-color-dark, #1a1a1a);
|
||||
}
|
||||
|
||||
.dark-mode .subscription-detail-row:hover {
|
||||
background-color: var(--hover-color-dark, #252525);
|
||||
}
|
||||
```
|
||||
|
||||
## Features Included
|
||||
|
||||
1. ✅ **Collapsible groups**: Each WSI Pointer group can be collapsed/expanded
|
||||
2. ✅ **Visual indicator**: Arrow icon (▶/▼) shows current state
|
||||
3. ✅ **Summary information**: Shows subscription count and oldest duration
|
||||
4. ✅ **Smooth transitions**: Icon rotation animation
|
||||
5. ✅ **Hover effects**: Visual feedback on header rows
|
||||
6. ✅ **Tree structure**: Detail rows indented with └─ character
|
||||
7. ✅ **Dark mode support**: Proper styling for both themes
|
||||
8. ✅ **Keyboard accessible**: Can be enhanced with keyboard navigation
|
||||
|
||||
## User Experience Flow
|
||||
|
||||
1. **Initial load**: All groups collapsed, showing summary rows only
|
||||
2. **Click header**: Group expands, showing all subscription details
|
||||
3. **Click again**: Group collapses back to summary
|
||||
4. **Multiple groups**: Each group can be independently expanded/collapsed
|
||||
5. **Visual feedback**: Hover effects and smooth animations
|
||||
|
||||
## Testing Checklist
|
||||
|
||||
1. ✅ Verify groups start collapsed by default
|
||||
2. ✅ Verify clicking header expands group
|
||||
3. ✅ Verify clicking again collapses group
|
||||
4. ✅ Verify multiple groups can be expanded simultaneously
|
||||
5. ✅ Verify summary information is accurate
|
||||
6. ✅ Verify detail rows display correctly when expanded
|
||||
7. ✅ Verify styling works in both light and dark modes
|
||||
8. ✅ Verify no console errors
|
||||
9. ✅ Verify performance with many subscriptions
|
||||
|
||||
## Optional Enhancements
|
||||
|
||||
1. **Expand/Collapse All**: Add buttons to expand or collapse all groups at once
|
||||
2. **Remember State**: Store expansion state in localStorage
|
||||
3. **Keyboard Navigation**: Add keyboard shortcuts (Space/Enter to toggle)
|
||||
4. **Animation**: Add slide-down/up animation for detail rows
|
||||
5. **Search/Filter**: Add ability to search within subscriptions
|
||||
6. **Export**: Add ability to export subscription data
|
||||
|
||||
## Next Steps
|
||||
|
||||
1. Review this plan
|
||||
2. Switch to Code mode
|
||||
3. Implement the changes in `api/index.js`
|
||||
4. Add CSS styles in `api/index.css`
|
||||
5. Test the collapsible functionality
|
||||
6. Verify all edge cases work correctly
|
||||
155
docs/subscription_table_grouping_plan.md
Normal file
@@ -0,0 +1,155 @@
|
||||
# Subscription Table WSI Pointer Grouping Implementation Plan
|
||||
|
||||
## Objective
|
||||
Modify the subscription details table to show the WSI Pointer value only once per group - on the first row of each WSI Pointer group, leaving it blank for subsequent rows with the same WSI Pointer.
|
||||
|
||||
## Current Behavior
|
||||
- All rows show the WSI Pointer value
|
||||
- Rows are sorted by WSI Pointer (grouping is working)
|
||||
- Visual grouping is not clear
|
||||
|
||||
## Desired Behavior
|
||||
- First row of each WSI Pointer group shows the full WSI Pointer value
|
||||
- Subsequent rows in the same group have an empty cell for WSI Pointer
|
||||
- This creates a clear visual grouping effect
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### File to Modify
|
||||
`api/index.js` - Function `populateSubscriptionDetailsTable()` (lines 4277-4384)
|
||||
|
||||
### Code Changes Required
|
||||
|
||||
#### Current Code (lines 4291-4383):
|
||||
```javascript
|
||||
// Sort subscriptions by wsi_pointer to group them together
|
||||
subscriptionsData.sort((a, b) => {
|
||||
const wsiA = a.wsi_pointer || '';
|
||||
const wsiB = b.wsi_pointer || '';
|
||||
return wsiA.localeCompare(wsiB);
|
||||
});
|
||||
|
||||
subscriptionsData.forEach((subscription, index) => {
|
||||
const row = document.createElement('tr');
|
||||
|
||||
// Calculate duration
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
const duration = now - subscription.created_at;
|
||||
const durationStr = formatDuration(duration);
|
||||
|
||||
// Format client IP (show full IP for admin view)
|
||||
const clientIP = subscription.client_ip || 'unknown';
|
||||
|
||||
// Format wsi_pointer (show full pointer)
|
||||
const wsiPointer = subscription.wsi_pointer || 'N/A';
|
||||
|
||||
// Format filters (show actual filter details)
|
||||
let filtersDisplay = 'None';
|
||||
// ... filter formatting code ...
|
||||
|
||||
row.innerHTML = `
|
||||
<td style="font-family: 'Courier New', monospace; font-size: 12px;">${wsiPointer}</td>
|
||||
<td style="font-family: 'Courier New', monospace; font-size: 12px;">${subscription.id || 'N/A'}</td>
|
||||
<!-- <td style="font-family: 'Courier New', monospace; font-size: 12px;">${clientIP}</td> -->
|
||||
<td>${durationStr}</td>
|
||||
<td>${filtersDisplay}</td>
|
||||
`;
|
||||
tableBody.appendChild(row);
|
||||
});
|
||||
```
|
||||
|
||||
#### Modified Code:
|
||||
```javascript
|
||||
// Sort subscriptions by wsi_pointer to group them together
|
||||
subscriptionsData.sort((a, b) => {
|
||||
const wsiA = a.wsi_pointer || '';
|
||||
const wsiB = b.wsi_pointer || '';
|
||||
return wsiA.localeCompare(wsiB);
|
||||
});
|
||||
|
||||
// Track previous WSI Pointer to detect group changes
|
||||
let previousWsiPointer = null;
|
||||
|
||||
subscriptionsData.forEach((subscription, index) => {
|
||||
const row = document.createElement('tr');
|
||||
|
||||
// Calculate duration
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
const duration = now - subscription.created_at;
|
||||
const durationStr = formatDuration(duration);
|
||||
|
||||
// Format client IP (show full IP for admin view)
|
||||
const clientIP = subscription.client_ip || 'unknown';
|
||||
|
||||
// Format wsi_pointer - only show if it's different from previous row
|
||||
const currentWsiPointer = subscription.wsi_pointer || 'N/A';
|
||||
let wsiPointerDisplay = '';
|
||||
|
||||
if (currentWsiPointer !== previousWsiPointer) {
|
||||
// This is the first row of a new group - show the WSI Pointer
|
||||
wsiPointerDisplay = currentWsiPointer;
|
||||
previousWsiPointer = currentWsiPointer;
|
||||
} else {
|
||||
// This is a continuation of the same group - leave blank
|
||||
wsiPointerDisplay = '';
|
||||
}
|
||||
|
||||
// Format filters (show actual filter details)
|
||||
let filtersDisplay = 'None';
|
||||
// ... filter formatting code remains the same ...
|
||||
|
||||
row.innerHTML = `
|
||||
<td style="font-family: 'Courier New', monospace; font-size: 12px;">${wsiPointerDisplay}</td>
|
||||
<td style="font-family: 'Courier New', monospace; font-size: 12px;">${subscription.id || 'N/A'}</td>
|
||||
<!-- <td style="font-family: 'Courier New', monospace; font-size: 12px;">${clientIP}</td> -->
|
||||
<td>${durationStr}</td>
|
||||
<td>${filtersDisplay}</td>
|
||||
`;
|
||||
tableBody.appendChild(row);
|
||||
});
|
||||
```
|
||||
|
||||
### Key Changes Explained
|
||||
|
||||
1. **Add tracking variable**: `let previousWsiPointer = null;` before the forEach loop
|
||||
2. **Store current WSI Pointer**: `const currentWsiPointer = subscription.wsi_pointer || 'N/A';`
|
||||
3. **Compare with previous**: Check if `currentWsiPointer !== previousWsiPointer`
|
||||
4. **Conditional display**:
|
||||
- If different: Show the WSI Pointer value and update `previousWsiPointer`
|
||||
- If same: Show empty string (blank cell)
|
||||
5. **Use display variable**: Replace `${wsiPointer}` with `${wsiPointerDisplay}` in the row HTML
|
||||
|
||||
### Visual Result
|
||||
|
||||
**Before:**
|
||||
```
|
||||
WSI Pointer | Subscription ID | Duration | Filters
|
||||
0x12345678 | sub-001 | 5m 30s | kinds:[1]
|
||||
0x12345678 | sub-002 | 3m 15s | kinds:[3]
|
||||
0x87654321 | sub-003 | 1m 45s | kinds:[1,3]
|
||||
```
|
||||
|
||||
**After:**
|
||||
```
|
||||
WSI Pointer | Subscription ID | Duration | Filters
|
||||
0x12345678 | sub-001 | 5m 30s | kinds:[1]
|
||||
| sub-002 | 3m 15s | kinds:[3]
|
||||
0x87654321 | sub-003 | 1m 45s | kinds:[1,3]
|
||||
```
|
||||
|
||||
## Testing Checklist
|
||||
|
||||
1. ✅ Verify first row of each group shows WSI Pointer
|
||||
2. ✅ Verify subsequent rows in same group are blank
|
||||
3. ✅ Verify grouping works with multiple subscriptions per WSI Pointer
|
||||
4. ✅ Verify single subscription per WSI Pointer still shows the value
|
||||
5. ✅ Verify empty/null WSI Pointers are handled correctly
|
||||
6. ✅ Verify table still displays correctly when no subscriptions exist
|
||||
|
||||
## Next Steps
|
||||
|
||||
1. Review this plan
|
||||
2. Switch to Code mode
|
||||
3. Implement the changes in `api/index.js`
|
||||
4. Test the implementation
|
||||
5. Verify the visual grouping effect
|
||||
200
docs/websocket_write_queue_design.md
Normal file
@@ -0,0 +1,200 @@
|
||||
# WebSocket Write Queue Design
|
||||
|
||||
## Problem Statement
|
||||
|
||||
The current partial write handling implementation uses a single buffer per session, which fails when multiple events need to be sent to the same client in rapid succession. This causes:
|
||||
|
||||
1. First event gets partial write → queued successfully
|
||||
2. Second event tries to write → **FAILS** with "write already pending"
|
||||
3. Subsequent events fail similarly, causing data loss
|
||||
|
||||
### Server Log Evidence
|
||||
```
|
||||
[WARN] WS_FRAME_PARTIAL: EVENT partial write, sub=1 sent=3210 expected=5333
|
||||
[TRACE] Queued partial write: len=2123
|
||||
[WARN] WS_FRAME_PARTIAL: EVENT partial write, sub=1 sent=3210 expected=5333
|
||||
[WARN] queue_websocket_write: write already pending, cannot queue new write
|
||||
[ERROR] Failed to queue partial EVENT write for sub=1
|
||||
```
|
||||
|
||||
## Root Cause
|
||||
|
||||
WebSocket frames must be sent **atomically** - you cannot interleave multiple frames. The current single-buffer approach correctly enforces this, but it rejects new writes instead of queuing them.
|
||||
|
||||
## Solution: Write Queue Architecture
|
||||
|
||||
### Design Principles
|
||||
|
||||
1. **Frame Atomicity**: Complete one WebSocket frame before starting the next
|
||||
2. **Sequential Processing**: Process queued writes in FIFO order
|
||||
3. **Memory Safety**: Proper cleanup on connection close or errors
|
||||
4. **Thread Safety**: Protect queue operations with existing session lock
|
||||
|
||||
### Data Structures
|
||||
|
||||
#### Write Queue Node
|
||||
```c
|
||||
struct write_queue_node {
|
||||
unsigned char* buffer; // Buffer with LWS_PRE space
|
||||
size_t total_len; // Total length of data to write
|
||||
size_t offset; // How much has been written so far
|
||||
int write_type; // LWS_WRITE_TEXT, etc.
|
||||
struct write_queue_node* next; // Next node in queue
|
||||
};
|
||||
```
|
||||
|
||||
#### Per-Session Write Queue
|
||||
```c
|
||||
struct per_session_data {
|
||||
// ... existing fields ...
|
||||
|
||||
// Write queue for handling multiple pending writes
|
||||
struct write_queue_node* write_queue_head; // First item to write
|
||||
struct write_queue_node* write_queue_tail; // Last item in queue
|
||||
int write_queue_length; // Number of items in queue
|
||||
int write_in_progress; // Flag: 1 if currently writing
|
||||
};
|
||||
```
|
||||
|
||||
### Algorithm Flow
|
||||
|
||||
#### 1. Enqueue Write (`queue_websocket_write`)
|
||||
|
||||
```
|
||||
IF write_queue is empty AND no write in progress:
|
||||
- Attempt immediate write with lws_write()
|
||||
- IF complete:
|
||||
- Return success
|
||||
- ELSE (partial write):
|
||||
- Create queue node with remaining data
|
||||
- Add to queue
|
||||
- Set write_in_progress flag
|
||||
- Request LWS_CALLBACK_SERVER_WRITEABLE
|
||||
ELSE:
|
||||
- Create queue node with full data
|
||||
- Append to queue tail
|
||||
- IF no write in progress:
|
||||
- Request LWS_CALLBACK_SERVER_WRITEABLE
|
||||
```
|
||||
|
||||
#### 2. Process Queue (`process_pending_write`)
|
||||
|
||||
```
|
||||
WHILE write_queue is not empty:
|
||||
- Get head node
|
||||
- Calculate remaining data (total_len - offset)
|
||||
- Attempt write with lws_write()
|
||||
|
||||
IF write fails (< 0):
|
||||
- Log error
|
||||
- Remove and free head node
|
||||
- Continue to next node
|
||||
|
||||
ELSE IF partial write (< remaining):
|
||||
- Update offset
|
||||
- Request LWS_CALLBACK_SERVER_WRITEABLE
|
||||
- Break (wait for next callback)
|
||||
|
||||
ELSE (complete write):
|
||||
- Remove and free head node
|
||||
- Continue to next node
|
||||
|
||||
IF queue is empty:
|
||||
- Clear write_in_progress flag
|
||||
```
|
||||
|
||||
#### 3. Cleanup (`LWS_CALLBACK_CLOSED`)
|
||||
|
||||
```
|
||||
WHILE write_queue is not empty:
|
||||
- Get head node
|
||||
- Free buffer
|
||||
- Free node
|
||||
- Move to next
|
||||
Clear queue pointers
|
||||
```
|
||||
|
||||
### Memory Management
|
||||
|
||||
1. **Allocation**: Each queue node allocates buffer with `LWS_PRE + data_len`
|
||||
2. **Ownership**: Queue owns all buffers until write completes or connection closes
|
||||
3. **Deallocation**: Free buffer and node when:
|
||||
- Write completes successfully
|
||||
- Write fails with error
|
||||
- Connection closes
|
||||
|
||||
### Thread Safety
|
||||
|
||||
- Use existing `pss->session_lock` to protect queue operations
|
||||
- Lock during:
|
||||
- Enqueue operations
|
||||
- Dequeue operations
|
||||
- Queue traversal for cleanup
|
||||
|
||||
### Performance Considerations
|
||||
|
||||
1. **Queue Length Limit**: Implement max queue length (e.g., 100 items) to prevent memory exhaustion
|
||||
2. **Memory Pressure**: Monitor total queued bytes per session
|
||||
3. **Backpressure**: If queue exceeds limit, close connection with NOTICE
|
||||
|
||||
### Error Handling
|
||||
|
||||
1. **Allocation Failure**: Return error, log, send NOTICE to client
|
||||
2. **Write Failure**: Remove failed frame, continue with next
|
||||
3. **Queue Overflow**: Close connection with appropriate NOTICE
|
||||
|
||||
## Implementation Plan
|
||||
|
||||
### Phase 1: Data Structure Changes
|
||||
1. Add `write_queue_node` structure to `websockets.h`
|
||||
2. Update `per_session_data` with queue fields
|
||||
3. Remove old single-buffer fields
|
||||
|
||||
### Phase 2: Queue Operations
|
||||
1. Implement `enqueue_write()` helper
|
||||
2. Implement `dequeue_write()` helper
|
||||
3. Update `queue_websocket_write()` to use queue
|
||||
4. Update `process_pending_write()` to process queue
|
||||
|
||||
### Phase 3: Integration
|
||||
1. Update all `lws_write()` call sites
|
||||
2. Update `LWS_CALLBACK_CLOSED` cleanup
|
||||
3. Add queue length monitoring
|
||||
|
||||
### Phase 4: Testing
|
||||
1. Test with rapid multiple events to same client
|
||||
2. Test with large events (>4KB)
|
||||
3. Test under load with concurrent connections
|
||||
4. Verify no "Invalid frame header" errors
|
||||
|
||||
## Expected Outcomes
|
||||
|
||||
1. **No More Rejections**: All writes queued successfully
|
||||
2. **Frame Integrity**: Complete frames sent atomically
|
||||
3. **Memory Safety**: Proper cleanup on all paths
|
||||
4. **Performance**: Minimal overhead for queue management
|
||||
|
||||
## Metrics to Monitor
|
||||
|
||||
1. Average queue length per session
|
||||
2. Maximum queue length observed
|
||||
3. Queue overflow events (if limit implemented)
|
||||
4. Write completion rate
|
||||
5. Partial write frequency
|
||||
|
||||
## Alternative Approaches Considered
|
||||
|
||||
### 1. Larger Single Buffer
|
||||
**Rejected**: Doesn't solve the fundamental problem of multiple concurrent writes
|
||||
|
||||
### 2. Immediate Write Retry
|
||||
**Rejected**: Could cause busy-waiting and CPU waste
|
||||
|
||||
### 3. Drop Frames on Conflict
|
||||
**Rejected**: Violates reliability requirements
|
||||
|
||||
## References
|
||||
|
||||
- libwebsockets documentation on `lws_write()` and `LWS_CALLBACK_SERVER_WRITEABLE`
|
||||
- WebSocket RFC 6455 on frame structure
|
||||
- Nostr NIP-01 on relay-to-client communication
|
||||
570
increment_and_push.sh
Executable file
@@ -0,0 +1,570 @@
|
||||
#!/bin/bash
set -e

# ANSI color codes used by the log helpers below.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

# Shared log helper: colored prefix + message, always written to stderr
# so command-substitution callers only capture real output on stdout.
_log() { echo -e "$1 $2" >&2; }

print_status()  { _log "${BLUE}[INFO]${NC}" "$1"; }
print_success() { _log "${GREEN}[SUCCESS]${NC}" "$1"; }
print_warning() { _log "${YELLOW}[WARNING]${NC}" "$1"; }
print_error()   { _log "${RED}[ERROR]${NC}" "$1"; }

# Script-wide state, populated by the argument-parsing loop below.
COMMIT_MESSAGE=""
RELEASE_MODE=false
VERSION_INCREMENT_TYPE="patch" # "patch", "minor", or "major"
|
||||
|
||||
# Print CLI usage/help text to stdout.
show_usage() {
    cat <<EOF
C-Relay Increment and Push Script

USAGE:
  $0 [OPTIONS] "commit message"

COMMANDS:
  $0 "commit message"        Default: increment patch, commit & push
  $0 -p "commit message"     Increment patch version
  $0 -m "commit message"     Increment minor version
  $0 -M "commit message"     Increment major version
  $0 -r "commit message"     Create release with assets (no version increment)
  $0 -r -m "commit message"  Create release with minor version increment
  $0 -h                      Show this help message

OPTIONS:
  -p, --patch    Increment patch version (default)
  -m, --minor    Increment minor version
  -M, --major    Increment major version
  -r, --release  Create release with assets
  -h, --help     Show this help message

EXAMPLES:
  $0 "Fixed event validation bug"
  $0 -m "Added new features"
  $0 -M "Breaking API changes"
  $0 -r "Release current version"
  $0 -r -m "Release with minor increment"

VERSION INCREMENT MODES:
  -p, --patch (default): Increment patch version (v1.2.3 → v1.2.4)
  -m, --minor: Increment minor version, zero patch (v1.2.3 → v1.3.0)
  -M, --major: Increment major version, zero minor+patch (v1.2.3 → v2.0.0)

RELEASE MODE (-r flag):
  - Build static binary using build_static.sh
  - Create source tarball
  - Git add, commit, push, and create Gitea release with assets
  - Can be combined with version increment flags

REQUIREMENTS FOR RELEASE MODE:
  - Gitea token in ~/.gitea_token for release uploads
  - Docker installed for static binary builds
EOF
}
|
||||
|
||||
# Parse command line arguments
|
||||
# ---- Command-line parsing --------------------------------------------------
# Flags may appear in any order; the first non-flag argument becomes the
# commit message and any further positional arguments are ignored.
while [[ $# -gt 0 ]]; do
    case "$1" in
        -r|--release)
            RELEASE_MODE=true
            ;;
        -p|--patch)
            VERSION_INCREMENT_TYPE="patch"
            ;;
        -m|--minor)
            VERSION_INCREMENT_TYPE="minor"
            ;;
        -M|--major)
            VERSION_INCREMENT_TYPE="major"
            ;;
        -h|--help)
            show_usage
            exit 0
            ;;
        *)
            if [[ -z "$COMMIT_MESSAGE" ]]; then
                COMMIT_MESSAGE="$1"
            fi
            ;;
    esac
    shift
done

# A commit message is mandatory in every mode.
if [[ -z "$COMMIT_MESSAGE" ]]; then
    print_error "Commit message is required"
    echo ""
    show_usage
    exit 1
fi
|
||||
|
||||
# Check if we're in a git repository
|
||||
# Abort with an error unless the current directory is inside a git repository.
check_git_repo() {
    if git rev-parse --git-dir > /dev/null 2>&1; then
        return 0
    fi
    print_error "Not in a git repository"
    exit 1
}
|
||||
|
||||
# Function to get current version and increment appropriately
|
||||
#######################################
# Compute the next semantic version from the highest existing git tag and
# sync the version macros in src/main.h.
# Globals:   LATEST_TAG, VERSION, MAJOR/MINOR/PATCH, NEW_* (written);
#            NEW_VERSION is exported for later functions.
# Arguments: $1 - increment type: "patch", "minor", or "major"
#            (anything else falls back to "patch", as before)
# Outputs:   progress messages on stderr
# Exits:     1 if the highest tag is not of the form vX.Y.Z
#######################################
increment_version() {
    local increment_type="$1"

    print_status "Getting current version..."

    # Highest semantic version tag (sort -V), not the chronologically latest.
    # NOTE: the old `|| echo ""` fallback was dead code — `tail` succeeds even
    # on empty input — so emptiness is checked explicitly instead.
    LATEST_TAG=$(git tag -l 'v*.*.*' | sort -V | tail -n 1)
    if [[ -z "$LATEST_TAG" ]]; then
        LATEST_TAG="v0.0.0"
        print_warning "No version tags found, starting from $LATEST_TAG"
    fi

    # Strip the 'v' prefix and split into numeric components.
    VERSION=${LATEST_TAG#v}
    if [[ $VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
        MAJOR=${BASH_REMATCH[1]}
        MINOR=${BASH_REMATCH[2]}
        PATCH=${BASH_REMATCH[3]}
    else
        print_error "Invalid version format in tag: $LATEST_TAG"
        print_error "Expected format: v0.1.0"
        exit 1
    fi

    case "$increment_type" in
        major)
            # Major release: bump major, zero minor and patch.
            NEW_MAJOR=$((MAJOR + 1)); NEW_MINOR=0; NEW_PATCH=0
            print_status "Major version increment: incrementing major version"
            ;;
        minor)
            # Minor release: bump minor, zero patch.
            NEW_MAJOR=$MAJOR; NEW_MINOR=$((MINOR + 1)); NEW_PATCH=0
            print_status "Minor version increment: incrementing minor version"
            ;;
        *)
            # Default (and fallback for unknown types): bump patch.
            NEW_MAJOR=$MAJOR; NEW_MINOR=$MINOR; NEW_PATCH=$((PATCH + 1))
            print_status "Patch version increment: incrementing patch version"
            ;;
    esac
    NEW_VERSION="v${NEW_MAJOR}.${NEW_MINOR}.${NEW_PATCH}"

    print_status "Current version: $LATEST_TAG"
    print_status "New version: $NEW_VERSION"

    # Keep the compiled-in version macros in sync with the tag.
    update_version_in_header "$NEW_VERSION" "$NEW_MAJOR" "$NEW_MINOR" "$NEW_PATCH"

    # Export for use in other functions.
    export NEW_VERSION
}
|
||||
|
||||
# Function to update version macros in src/main.h
|
||||
#######################################
# Rewrite the version macros in src/main.h to match the new tag.
# Arguments: $1 - full version string (e.g. "v1.2.3")
#            $2 - major  $3 - minor  $4 - patch
# Exits:     1 if src/main.h does not exist
#######################################
update_version_in_header() {
    local new_version="$1"
    local major="$2"
    local minor="$3"
    local patch="$4"

    print_status "Updating version in src/main.h..."

    if [[ ! -f "src/main.h" ]]; then
        print_error "src/main.h not found"
        exit 1
    fi

    # Update VERSION string macro.
    sed -i "s/#define VERSION \".*\"/#define VERSION \"$new_version\"/" src/main.h

    # Update numeric macros. BUG FIX: VERSION_MINOR previously matched `.*`,
    # which would also swallow any trailing comment on the line; all three
    # now match digits only, consistently.
    sed -i "s/#define VERSION_MAJOR [0-9]\+/#define VERSION_MAJOR $major/" src/main.h
    sed -i "s/#define VERSION_MINOR [0-9]\+/#define VERSION_MINOR $minor/" src/main.h
    sed -i "s/#define VERSION_PATCH [0-9]\+/#define VERSION_PATCH $patch/" src/main.h

    print_success "Updated version in src/main.h to $new_version"
}
|
||||
|
||||
# Function to commit and push changes
|
||||
# Stage everything, commit as "<version> - <message>", create the version
# tag, then push the branch and the tag (force-pushing the tag if needed).
git_commit_and_push() {
    print_status "Preparing git commit..."

    git add . > /dev/null 2>&1 || { print_error "Failed to stage changes"; exit 1; }
    print_success "Staged all changes"

    # Only commit when the index actually changed.
    if git diff --staged --quiet; then
        print_warning "No changes to commit"
    else
        git commit -m "$NEW_VERSION - $COMMIT_MESSAGE" > /dev/null 2>&1 \
            || { print_error "Failed to commit changes"; exit 1; }
        print_success "Committed changes"
    fi

    if git tag "$NEW_VERSION" > /dev/null 2>&1; then
        print_success "Created tag: $NEW_VERSION"
    else
        print_warning "Tag $NEW_VERSION already exists"
    fi

    print_status "Pushing to remote repository..."
    git push > /dev/null 2>&1 || { print_error "Failed to push changes"; exit 1; }
    print_success "Pushed changes"

    # Push only the new tag so pre-existing tags cannot cause conflicts.
    if git push origin "$NEW_VERSION" > /dev/null 2>&1; then
        print_success "Pushed tag: $NEW_VERSION"
        return 0
    fi
    print_warning "Tag push failed, trying force push..."
    if git push --force origin "$NEW_VERSION" > /dev/null 2>&1; then
        print_success "Force-pushed updated tag: $NEW_VERSION"
    else
        print_error "Failed to push tag: $NEW_VERSION"
        exit 1
    fi
}
|
||||
|
||||
# Function to commit and push changes without creating a tag (tag already created)
|
||||
# Stage everything, commit as "<version> - <message>", then push the branch
# and the already-created tag; unlike git_commit_and_push, no tag is created
# here (the caller tags before compilation so version.h picks it up).
git_commit_and_push_no_tag() {
    print_status "Preparing git commit..."

    git add . > /dev/null 2>&1 || { print_error "Failed to stage changes"; exit 1; }
    print_success "Staged all changes"

    # Only commit when the index actually changed.
    if git diff --staged --quiet; then
        print_warning "No changes to commit"
    else
        git commit -m "$NEW_VERSION - $COMMIT_MESSAGE" > /dev/null 2>&1 \
            || { print_error "Failed to commit changes"; exit 1; }
        print_success "Committed changes"
    fi

    print_status "Pushing to remote repository..."
    git push > /dev/null 2>&1 || { print_error "Failed to push changes"; exit 1; }
    print_success "Pushed changes"

    # Push only the new tag so pre-existing tags cannot cause conflicts.
    if git push origin "$NEW_VERSION" > /dev/null 2>&1; then
        print_success "Pushed tag: $NEW_VERSION"
        return 0
    fi
    print_warning "Tag push failed, trying force push..."
    if git push --force origin "$NEW_VERSION" > /dev/null 2>&1; then
        print_success "Force-pushed updated tag: $NEW_VERSION"
    else
        print_error "Failed to push tag: $NEW_VERSION"
        exit 1
    fi
}
|
||||
|
||||
# Function to build release binary
|
||||
# Build the static release binary via ./build_static.sh.
# Returns 0 on success, 1 if the script is missing or the build fails.
build_release_binary() {
    print_status "Building release binary..."

    if [[ ! -f "build_static.sh" ]]; then
        print_error "build_static.sh not found"
        return 1
    fi

    if ! ./build_static.sh > /dev/null 2>&1; then
        print_error "Failed to build static binary"
        return 1
    fi

    print_success "Built static binary successfully"
    return 0
}
|
||||
|
||||
# Function to create source tarball
|
||||
# Create a source tarball c-relay-<version>.tar.gz in the current directory,
# excluding build artifacts, databases, logs, git metadata and old tarballs.
# Outputs: the tarball filename on stdout (log messages go to stderr).
# Returns: 0 on success, 1 on tar failure.
create_source_tarball() {
    print_status "Creating source tarball..."

    local tarball_name="c-relay-${NEW_VERSION#v}.tar.gz"

    if ! tar -czf "$tarball_name" \
        --exclude='build/*' \
        --exclude='.git*' \
        --exclude='*.db' \
        --exclude='*.db-*' \
        --exclude='*.log' \
        --exclude='*.tar.gz' \
        . > /dev/null 2>&1; then
        print_error "Failed to create source tarball"
        return 1
    fi

    print_success "Created source tarball: $tarball_name"
    echo "$tarball_name"
    return 0
}
|
||||
|
||||
# Function to upload release assets to Gitea
|
||||
#######################################
# Upload the binary and source tarball as assets of a Gitea release.
# Arguments: $1 - numeric release id
#            $2 - path to the static binary (may be empty or missing)
#            $3 - path to the source tarball (may be empty or missing)
# Returns:   0 — uploads are best-effort; failures are logged, not fatal.
#######################################
upload_release_assets() {
    local release_id="$1"
    local binary_path="$2"
    local tarball_path="$3"

    print_status "Uploading release assets..."

    # Without a token we cannot authenticate; skip quietly.
    if [[ ! -f "$HOME/.gitea_token" ]]; then
        print_warning "No ~/.gitea_token found. Skipping asset uploads."
        return 0
    fi

    # Declarations split from assignments so failures are not masked by
    # 'local' always returning 0; curl failures fall back to an empty
    # response (non-fatal, matching the previous behavior under set -e).
    local token
    token=$(tr -d '\n\r' < "$HOME/.gitea_token")
    local api_url="https://git.laantungir.net/api/v1/repos/laantungir/c-relay"
    local assets_url="$api_url/releases/$release_id/assets"
    print_status "Assets URL: $assets_url"

    # Upload binary with a small retry loop — the release may not be visible
    # to the assets endpoint immediately after creation (eventual consistency).
    if [[ -f "$binary_path" ]]; then
        print_status "Uploading binary: $(basename "$binary_path")"

        local max_attempts=3
        local attempt=1
        local binary_response
        while [[ $attempt -le $max_attempts ]]; do
            print_status "Upload attempt $attempt/$max_attempts"
            # Consistent quiet mode (-s); the old -fS/-s mix behaved
            # differently between the two uploads for no reason.
            binary_response=$(curl -s -X POST "$assets_url" \
                -H "Authorization: token $token" \
                -F "attachment=@$binary_path;filename=$(basename "$binary_path")" \
                -F "name=$(basename "$binary_path")") || binary_response=""

            if echo "$binary_response" | grep -q '"id"'; then
                print_success "Uploaded binary successfully"
                break
            fi

            print_warning "Upload attempt $attempt failed"
            if [[ $attempt -lt $max_attempts ]]; then
                print_status "Retrying in 2 seconds..."
                sleep 2
            else
                print_error "Failed to upload binary after $max_attempts attempts"
                print_error "Response: $binary_response"
            fi
            ((attempt++))
        done
    fi

    # Upload source tarball (single attempt, failure is non-fatal).
    if [[ -f "$tarball_path" ]]; then
        print_status "Uploading source tarball: $(basename "$tarball_path")"
        local tarball_response
        tarball_response=$(curl -s -X POST "$assets_url" \
            -H "Authorization: token $token" \
            -F "attachment=@$tarball_path;filename=$(basename "$tarball_path")") || tarball_response=""

        if echo "$tarball_response" | grep -q '"id"'; then
            print_success "Uploaded source tarball successfully"
        else
            print_warning "Failed to upload source tarball: $tarball_response"
        fi
    fi
}
|
||||
|
||||
# Function to create Gitea release
|
||||
#######################################
# Create (or reuse) a Gitea release for $NEW_VERSION.
# Globals:   NEW_VERSION, COMMIT_MESSAGE (read)
# Outputs:   the numeric release id on stdout; log messages go to stderr
# Returns:   0 with a release id on stdout, 1 if the release can neither be
#            created nor found.
#######################################
create_gitea_release() {
    print_status "Creating Gitea release..."

    if [[ ! -f "$HOME/.gitea_token" ]]; then
        print_warning "No ~/.gitea_token found. Skipping release creation."
        print_warning "Create ~/.gitea_token with your Gitea access token to enable releases."
        # NOTE(review): returning 0 with empty output makes the caller's
        # numeric release-id check fail — confirm this path is intended.
        return 0
    fi

    # Declarations split from assignments so failures are not masked by
    # 'local'; curl failures fall back to an empty response (non-fatal).
    local token
    token=$(tr -d '\n\r' < "$HOME/.gitea_token")
    local api_url="https://git.laantungir.net/api/v1/repos/laantungir/c-relay"

    print_status "Creating release $NEW_VERSION..."
    local response
    response=$(curl -s -X POST "$api_url/releases" \
        -H "Authorization: token $token" \
        -H "Content-Type: application/json" \
        -d "{\"tag_name\": \"$NEW_VERSION\", \"name\": \"$NEW_VERSION\", \"body\": \"$COMMIT_MESSAGE\"}") || response=""

    local release_id
    if echo "$response" | grep -q '"id"'; then
        print_success "Created release $NEW_VERSION"
        release_id=$(echo "$response" | grep -o '"id":[0-9]*' | head -1 | cut -d':' -f2)
        echo "$release_id"
        return 0
    fi

    if echo "$response" | grep -q "already exists"; then
        print_warning "Release $NEW_VERSION already exists"
        local check_response
        check_response=$(curl -s -H "Authorization: token $token" "$api_url/releases/tags/$NEW_VERSION") || check_response=""
        if echo "$check_response" | grep -q '"id"'; then
            release_id=$(echo "$check_response" | grep -o '"id":[0-9]*' | head -1 | cut -d':' -f2)
            print_status "Using existing release ID: $release_id"
            echo "$release_id"
            return 0
        fi
        print_error "Could not find existing release ID"
        return 1
    fi

    print_error "Failed to create release $NEW_VERSION"
    print_error "Response: $response"

    # The POST occasionally returns an unexpected body even though the
    # release was created — check before giving up.
    print_status "Checking if release exists..."
    local check_response
    check_response=$(curl -s -H "Authorization: token $token" "$api_url/releases/tags/$NEW_VERSION") || check_response=""
    if echo "$check_response" | grep -q '"id"'; then
        print_warning "Release exists but creation response was unexpected"
        release_id=$(echo "$check_response" | grep -o '"id":[0-9]*' | head -1 | cut -d':' -f2)
        echo "$release_id"
        return 0
    fi
    print_error "Release does not exist and creation failed"
    return 1
}
|
||||
|
||||
# Main execution
|
||||
# Top-level driver.
#
# Default mode : bump version, tag, commit & push.
# Release mode : optionally bump version, tag, commit & push, build the
#                static binary, tar the sources, create a Gitea release
#                and upload both artifacts as assets.
main() {
    print_status "C-Relay Increment and Push Script"

    check_git_repo

    if [[ "$RELEASE_MODE" != true ]]; then
        print_status "=== DEFAULT MODE ==="

        # Increment version based on type (default: patch).
        increment_version "$VERSION_INCREMENT_TYPE"

        # Tag BEFORE compilation so a generated version.h picks it up.
        if git tag "$NEW_VERSION" > /dev/null 2>&1; then
            print_success "Created tag: $NEW_VERSION"
        else
            print_warning "Tag $NEW_VERSION already exists, removing and recreating..."
            git tag -d "$NEW_VERSION" > /dev/null 2>&1
            git tag "$NEW_VERSION" > /dev/null 2>&1
        fi

        # Commit and push; tag creation already happened above.
        git_commit_and_push_no_tag

        print_success "Increment and push completed successfully!"
        print_status "Version $NEW_VERSION pushed to repository"
        return 0
    fi

    print_status "=== RELEASE MODE ==="

    # Only bump the version when a non-default increment was requested;
    # plain -r re-releases the current (highest) tag.
    # NOTE(review): "-r -p" is indistinguishable from plain "-r" here —
    # confirm that is intended.
    if [[ "$VERSION_INCREMENT_TYPE" != "patch" ]]; then
        increment_version "$VERSION_INCREMENT_TYPE"
    else
        LATEST_TAG=$(git tag -l 'v*.*.*' | sort -V | tail -n 1 || echo "v0.0.0")
        NEW_VERSION="$LATEST_TAG"
        export NEW_VERSION
    fi

    # Tag BEFORE compilation so a generated version.h picks it up.
    if git tag "$NEW_VERSION" > /dev/null 2>&1; then
        print_success "Created tag: $NEW_VERSION"
    else
        print_warning "Tag $NEW_VERSION already exists, removing and recreating..."
        git tag -d "$NEW_VERSION" > /dev/null 2>&1
        git tag "$NEW_VERSION" > /dev/null 2>&1
    fi

    # Commit and push; tag creation already happened above.
    git_commit_and_push_no_tag

    # Build (or reuse) the static binary; its absence is not fatal.
    local binary_path=""
    if build_release_binary; then
        binary_path="build/c_relay_static_x86_64"
    else
        print_warning "Binary build failed, continuing with release creation"
        if [[ -f "build/c_relay_static_x86_64" ]]; then
            print_status "Using existing binary from previous build"
            binary_path="build/c_relay_static_x86_64"
        fi
    fi

    # Source tarball is likewise best-effort.
    local tarball_path=""
    if ! tarball_path=$(create_source_tarball); then
        print_warning "Source tarball creation failed, continuing with release creation"
    fi

    # Create the Gitea release and attach whatever assets we have.
    local release_id=""
    if release_id=$(create_gitea_release); then
        if [[ "$release_id" =~ ^[0-9]+$ ]]; then
            if [[ -n "$binary_path" || -n "$tarball_path" ]]; then
                upload_release_assets "$release_id" "$binary_path" "$tarball_path"
            fi
            print_success "Release $NEW_VERSION completed successfully!"
        else
            print_error "Invalid release_id: $release_id"
            exit 1
        fi
    else
        print_error "Release creation failed"
    fi
}
|
||||
|
||||
# Execute main function, forwarding any CLI arguments for forward
# compatibility (argument parsing currently happens at top level).
main "$@"
|
||||
@@ -133,6 +133,11 @@ if [ -n "$PORT_OVERRIDE" ]; then
|
||||
fi
|
||||
fi
|
||||
|
||||
# Validate strict port flag (only makes sense with port override)
|
||||
if [ "$USE_TEST_KEYS" = true ] && [ -z "$PORT_OVERRIDE" ]; then
|
||||
echo "WARNING: --strict-port is always used with test keys. Consider specifying a custom port with -p."
|
||||
fi
|
||||
|
||||
# Validate debug level if provided
|
||||
if [ -n "$DEBUG_LEVEL" ]; then
|
||||
if ! [[ "$DEBUG_LEVEL" =~ ^[0-5]$ ]]; then
|
||||
@@ -163,6 +168,8 @@ if [ "$HELP" = true ]; then
|
||||
echo " $0 # Fresh start with random keys"
|
||||
echo " $0 -a <admin-hex> -r <relay-hex> # Use custom keys"
|
||||
echo " $0 -a <admin-hex> -p 9000 # Custom admin key on port 9000"
|
||||
echo " $0 -p 7777 --strict-port # Fail if port 7777 unavailable (no fallback)"
|
||||
echo " $0 -p 8080 --strict-port -d=3 # Custom port with strict binding and debug"
|
||||
echo " $0 --debug-level=3 # Start with debug level 3 (info)"
|
||||
echo " $0 -d=5 # Start with debug level 5 (trace)"
|
||||
echo " $0 --preserve-database # Preserve existing database and keys"
|
||||
|
||||
1
nips
92
notes.txt
Normal file
@@ -0,0 +1,92 @@
|
||||
|
||||
Task Completed
|
||||
Perfect! The core dump limit is now set correctly. You can see both:
|
||||
|
||||
LimitCORE=infinity
|
||||
LimitCORESoft=infinity
|
||||
What happens next:
|
||||
The relay is now configured to generate core dumps when it crashes
|
||||
Wait for the next crash to occur (based on your logs, it seems to happen intermittently)
|
||||
When it crashes, the core dump will be automatically captured by systemd
|
||||
After the next crash, analyze it:
|
||||
# List all core dumps (most recent first)
|
||||
sudo coredumpctl list
|
||||
|
||||
# View info about the most recent c-relay crash
|
||||
sudo coredumpctl info c-relay
|
||||
|
||||
# Load the core dump in gdb for detailed analysis
|
||||
sudo coredumpctl gdb c-relay
|
||||
|
||||
Inside gdb, run these commands:
|
||||
(gdb) bt full # Full backtrace with all variables
|
||||
(gdb) info threads # Show all threads
|
||||
(gdb) thread apply all bt # Backtrace for all threads
|
||||
(gdb) frame 0 # Go to crash frame
|
||||
(gdb) info locals # Show local variables
|
||||
(gdb) list # Show source code around crash
|
||||
|
||||
The backtrace will show you exactly which line of code caused the segmentation fault and the state of all variables at that moment, which should pinpoint whether it's the subscription management issue, buffer overflow, or something else.
|
||||
|
||||
You're all set up now - just wait for the next crash and then run the coredumpctl commands to analyze it.
|
||||
|
||||
|
||||
--------------------------
|
||||
|
||||
|
||||
Even simpler: Use this one-liner
|
||||
# Start relay and immediately attach gdb
|
||||
cd /usr/local/bin/c_relay
|
||||
sudo -u c-relay ./c_relay --debug-level=5 & sleep 2 && sudo gdb -p $(pgrep c_relay)
|
||||
|
||||
Inside gdb, after attaching:
|
||||
|
||||
(gdb) continue
|
||||
Or shorter:
|
||||
(gdb) c
|
||||
|
||||
|
||||
How to View the Logs
|
||||
Check systemd journal:
|
||||
# View all c-relay logs
|
||||
sudo journalctl -u c-relay
|
||||
|
||||
# View recent logs (last 50 lines)
|
||||
sudo journalctl -u c-relay -n 50
|
||||
|
||||
# Follow logs in real-time
|
||||
sudo journalctl -u c-relay -f
|
||||
|
||||
# View logs since last boot
|
||||
sudo journalctl -u c-relay -b
|
||||
|
||||
Check if service is running:
|
||||
|
||||
|
||||
|
||||
To immediately trim the syslog file size:
|
||||
|
||||
Safe Syslog Truncation
|
||||
Stop syslog service first:
|
||||
sudo systemctl stop rsyslog
|
||||
|
||||
Truncate the syslog file:
|
||||
sudo truncate -s 0 /var/log/syslog
|
||||
|
||||
Restart syslog service:
|
||||
sudo systemctl start rsyslog
|
||||
sudo systemctl status rsyslog
|
||||
|
||||
|
||||
sudo -u c-relay ./c_relay --debug-level=5 -r 85d0b37e2ae822966dcadd06b2dc9368cde73865f90ea4d44f8b57d47ef0820a -a 1ec454734dcbf6fe54901ce25c0c7c6bca5edd89443416761fadc321d38df139
|
||||
|
||||
./c_relay_static_x86_64 -p 7889 --debug-level=5 -r 85d0b37e2ae822966dcadd06b2dc9368cde73865f90ea4d44f8b57d47ef0820a -a 1ec454734dcbf6fe54901ce25c0c7c6bca5edd89443416761fadc321d38df139
|
||||
|
||||
|
||||
sudo ufw allow 8888/tcp
|
||||
sudo ufw delete allow 8888/tcp
|
||||
|
||||
lsof -i :7777
|
||||
kill $(lsof -t -i :7777)
|
||||
kill -9 $(lsof -t -i :7777)
|
||||
|
||||
BIN
screenshots/2025-10-29_15-33.png
Normal file
|
After Width: | Height: | Size: 2.6 MiB |
BIN
screenshots/2025-10-29_15-35.png
Normal file
|
After Width: | Height: | Size: 2.5 MiB |
BIN
screenshots/2025-10-29_15-35_1.png
Normal file
|
After Width: | Height: | Size: 2.6 MiB |
BIN
screenshots/2025-10-29_16-04.png
Normal file
|
After Width: | Height: | Size: 2.5 MiB |
BIN
screenshots/2025-11-11_11-57.png
Normal file
|
After Width: | Height: | Size: 738 KiB |
BIN
screenshots/2025-11-13_15-53.png
Normal file
|
After Width: | Height: | Size: 2.2 MiB |
BIN
screenshots/2025-11-13_15-53_1.png
Normal file
|
After Width: | Height: | Size: 1.3 MiB |
BIN
screenshots/DM.png
Normal file
|
After Width: | Height: | Size: 79 KiB |
BIN
screenshots/ProfileEvents.png
Normal file
|
After Width: | Height: | Size: 105 KiB |
BIN
screenshots/config.png
Normal file
|
After Width: | Height: | Size: 86 KiB |
BIN
screenshots/light-mode.png
Normal file
|
After Width: | Height: | Size: 79 KiB |
BIN
screenshots/main-light.png
Normal file
|
After Width: | Height: | Size: 84 KiB |
BIN
screenshots/main.png
Normal file
|
After Width: | Height: | Size: 76 KiB |
BIN
screenshots/raffles.png
Normal file
|
After Width: | Height: | Size: 296 KiB |
BIN
screenshots/sqlQuery.png
Normal file
|
After Width: | Height: | Size: 154 KiB |
BIN
screenshots/subscriptions.png
Normal file
|
After Width: | Height: | Size: 157 KiB |
BIN
screenshots/white-blacklists.png
Normal file
|
After Width: | Height: | Size: 45 KiB |
49
src/api.h
@@ -1,8 +1,9 @@
|
||||
// API module for serving embedded web content
|
||||
// API module for serving embedded web content and admin API functions
|
||||
#ifndef API_H
|
||||
#define API_H
|
||||
|
||||
#include <libwebsockets.h>
|
||||
#include <cjson/cJSON.h>
|
||||
|
||||
// Embedded file session data structure for managing buffer lifetime
|
||||
struct embedded_file_session_data {
|
||||
@@ -14,10 +15,56 @@ struct embedded_file_session_data {
|
||||
int body_sent;
|
||||
};
|
||||
|
||||
// Configuration change pending structure
|
||||
typedef struct pending_config_change {
|
||||
char admin_pubkey[65]; // Who requested the change
|
||||
char config_key[128]; // What config to change
|
||||
char old_value[256]; // Current value
|
||||
char new_value[256]; // Requested new value
|
||||
time_t timestamp; // When requested
|
||||
char change_id[33]; // Unique ID for this change (first 32 chars of hash)
|
||||
struct pending_config_change* next; // Linked list for concurrent changes
|
||||
} pending_config_change_t;
|
||||
|
||||
// Handle HTTP request for embedded API files
|
||||
int handle_embedded_file_request(struct lws* wsi, const char* requested_uri);
|
||||
|
||||
// Generate stats JSON from database queries
|
||||
char* generate_stats_json(void);
|
||||
|
||||
// Generate human-readable stats text
|
||||
char* generate_stats_text(void);
|
||||
|
||||
// Generate config text from database
|
||||
char* generate_config_text(void);
|
||||
|
||||
// Send admin response with request ID correlation
|
||||
int send_admin_response(const char* sender_pubkey, const char* response_content, const char* request_id,
|
||||
char* error_message, size_t error_size, struct lws* wsi);
|
||||
|
||||
// Configuration change system functions
|
||||
int parse_config_command(const char* message, char* key, char* value);
|
||||
int validate_config_change(const char* key, const char* value);
|
||||
char* store_pending_config_change(const char* admin_pubkey, const char* key,
|
||||
const char* old_value, const char* new_value);
|
||||
pending_config_change_t* find_pending_change(const char* admin_pubkey, const char* change_id);
|
||||
int apply_config_change(const char* key, const char* value);
|
||||
void cleanup_expired_pending_changes(void);
|
||||
int handle_config_confirmation(const char* admin_pubkey, const char* response);
|
||||
char* generate_config_change_confirmation(const char* key, const char* old_value, const char* new_value);
|
||||
int process_config_change_request(const char* admin_pubkey, const char* message);
|
||||
|
||||
// SQL query functions
|
||||
int validate_sql_query(const char* query, char* error_message, size_t error_size);
|
||||
char* execute_sql_query(const char* query, const char* request_id, char* error_message, size_t error_size);
|
||||
int handle_sql_query_unified(cJSON* event, const char* query, char* error_message, size_t error_size, struct lws* wsi);
|
||||
|
||||
// Monitoring system functions
|
||||
void monitoring_on_event_stored(void);
|
||||
void monitoring_on_subscription_change(void);
|
||||
int get_monitoring_throttle_seconds(void);
|
||||
|
||||
// Kind 1 status posts
|
||||
int generate_and_post_status_event(void);
|
||||
|
||||
#endif // API_H
|
||||
1094
src/config.c
79
src/config.h
@@ -27,78 +27,6 @@ struct lws;
|
||||
// Database path for event-based config
|
||||
extern char g_database_path[512];
|
||||
|
||||
// Unified configuration cache structure (consolidates all caching systems)
|
||||
typedef struct {
|
||||
// Critical keys (frequently accessed)
|
||||
char admin_pubkey[65];
|
||||
char relay_pubkey[65];
|
||||
|
||||
// Auth config (from request_validator)
|
||||
int auth_required;
|
||||
long max_file_size;
|
||||
int admin_enabled;
|
||||
int nip42_mode;
|
||||
int nip42_challenge_timeout;
|
||||
int nip42_time_tolerance;
|
||||
int nip70_protected_events_enabled;
|
||||
|
||||
// Static buffer for config values (replaces static buffers in get_config_value functions)
|
||||
char temp_buffer[CONFIG_VALUE_MAX_LENGTH];
|
||||
|
||||
// NIP-11 relay information (migrated from g_relay_info in main.c)
|
||||
struct {
|
||||
char name[RELAY_NAME_MAX_LENGTH];
|
||||
char description[RELAY_DESCRIPTION_MAX_LENGTH];
|
||||
char banner[RELAY_URL_MAX_LENGTH];
|
||||
char icon[RELAY_URL_MAX_LENGTH];
|
||||
char pubkey[RELAY_PUBKEY_MAX_LENGTH];
|
||||
char contact[RELAY_CONTACT_MAX_LENGTH];
|
||||
char software[RELAY_URL_MAX_LENGTH];
|
||||
char version[64];
|
||||
char privacy_policy[RELAY_URL_MAX_LENGTH];
|
||||
char terms_of_service[RELAY_URL_MAX_LENGTH];
|
||||
// Raw string values for parsing into cJSON arrays
|
||||
char supported_nips_str[CONFIG_VALUE_MAX_LENGTH];
|
||||
char language_tags_str[CONFIG_VALUE_MAX_LENGTH];
|
||||
char relay_countries_str[CONFIG_VALUE_MAX_LENGTH];
|
||||
// Parsed cJSON arrays
|
||||
cJSON* supported_nips;
|
||||
cJSON* limitation;
|
||||
cJSON* retention;
|
||||
cJSON* relay_countries;
|
||||
cJSON* language_tags;
|
||||
cJSON* tags;
|
||||
char posting_policy[RELAY_URL_MAX_LENGTH];
|
||||
cJSON* fees;
|
||||
char payments_url[RELAY_URL_MAX_LENGTH];
|
||||
} relay_info;
|
||||
|
||||
// NIP-13 PoW configuration (migrated from g_pow_config in main.c)
|
||||
struct {
|
||||
int enabled;
|
||||
int min_pow_difficulty;
|
||||
int validation_flags;
|
||||
int require_nonce_tag;
|
||||
int reject_lower_targets;
|
||||
int strict_format;
|
||||
int anti_spam_mode;
|
||||
} pow_config;
|
||||
|
||||
// NIP-40 Expiration configuration (migrated from g_expiration_config in main.c)
|
||||
struct {
|
||||
int enabled;
|
||||
int strict_mode;
|
||||
int filter_responses;
|
||||
int delete_expired;
|
||||
long grace_period;
|
||||
} expiration_config;
|
||||
|
||||
// Cache management
|
||||
time_t cache_expires;
|
||||
int cache_valid;
|
||||
pthread_mutex_t cache_lock;
|
||||
} unified_config_cache_t;
|
||||
|
||||
// Command line options structure for first-time startup
|
||||
typedef struct {
|
||||
int port_override; // -1 = not set, >0 = port value
|
||||
@@ -108,9 +36,6 @@ typedef struct {
|
||||
int debug_level; // 0-5, default 0 (no debug output)
|
||||
} cli_options_t;
|
||||
|
||||
// Global unified configuration cache
|
||||
extern unified_config_cache_t g_unified_cache;
|
||||
|
||||
// Core configuration functions (temporary compatibility)
|
||||
int init_configuration_system(const char* config_dir_override, const char* config_file_override);
|
||||
void cleanup_configuration_system(void);
|
||||
@@ -138,7 +63,7 @@ int get_config_bool(const char* key, int default_value);
|
||||
|
||||
// First-time startup functions
|
||||
int is_first_time_startup(void);
|
||||
int first_time_startup_sequence(const cli_options_t* cli_options);
|
||||
int first_time_startup_sequence(const cli_options_t* cli_options, char* admin_pubkey_out, char* relay_pubkey_out, char* relay_privkey_out);
|
||||
int startup_existing_relay(const char* relay_pubkey, const cli_options_t* cli_options);
|
||||
|
||||
// Configuration application functions
|
||||
@@ -189,7 +114,7 @@ cJSON* build_query_response(const char* query_type, cJSON* results_array, int to
|
||||
|
||||
// Auth rules management functions
|
||||
int add_auth_rule_from_config(const char* rule_type, const char* pattern_type,
|
||||
const char* pattern_value, const char* action);
|
||||
const char* pattern_value);
|
||||
int remove_auth_rule_from_config(const char* rule_type, const char* pattern_type,
|
||||
const char* pattern_value);
|
||||
|
||||
|
||||
51
src/debug.c
@@ -1,51 +0,0 @@
|
||||
#include "debug.h"
|
||||
#include <stdarg.h>
|
||||
#include <string.h>
|
||||
|
||||
// Global debug level (default: no debug output)
|
||||
debug_level_t g_debug_level = DEBUG_LEVEL_NONE;
|
||||
|
||||
void debug_init(int level) {
|
||||
if (level < 0) level = 0;
|
||||
if (level > 5) level = 5;
|
||||
g_debug_level = (debug_level_t)level;
|
||||
}
|
||||
|
||||
void debug_log(debug_level_t level, const char* file, int line, const char* format, ...) {
|
||||
// Get timestamp
|
||||
time_t now = time(NULL);
|
||||
struct tm* tm_info = localtime(&now);
|
||||
char timestamp[32];
|
||||
strftime(timestamp, sizeof(timestamp), "%Y-%m-%d %H:%M:%S", tm_info);
|
||||
|
||||
// Get level string
|
||||
const char* level_str = "UNKNOWN";
|
||||
switch (level) {
|
||||
case DEBUG_LEVEL_ERROR: level_str = "ERROR"; break;
|
||||
case DEBUG_LEVEL_WARN: level_str = "WARN "; break;
|
||||
case DEBUG_LEVEL_INFO: level_str = "INFO "; break;
|
||||
case DEBUG_LEVEL_DEBUG: level_str = "DEBUG"; break;
|
||||
case DEBUG_LEVEL_TRACE: level_str = "TRACE"; break;
|
||||
default: break;
|
||||
}
|
||||
|
||||
// Print prefix with timestamp and level
|
||||
printf("[%s] [%s] ", timestamp, level_str);
|
||||
|
||||
// Print source location when debug level is TRACE (5) or higher
|
||||
if (file && g_debug_level >= DEBUG_LEVEL_TRACE) {
|
||||
// Extract just the filename (not full path)
|
||||
const char* filename = strrchr(file, '/');
|
||||
filename = filename ? filename + 1 : file;
|
||||
printf("[%s:%d] ", filename, line);
|
||||
}
|
||||
|
||||
// Print message
|
||||
va_list args;
|
||||
va_start(args, format);
|
||||
vprintf(format, args);
|
||||
va_end(args);
|
||||
|
||||
printf("\n");
|
||||
fflush(stdout);
|
||||
}
|
||||
43
src/debug.h
@@ -1,43 +0,0 @@
|
||||
#ifndef DEBUG_H
|
||||
#define DEBUG_H
|
||||
|
||||
#include <stdio.h>
|
||||
#include <time.h>
|
||||
|
||||
// Debug levels
|
||||
typedef enum {
|
||||
DEBUG_LEVEL_NONE = 0,
|
||||
DEBUG_LEVEL_ERROR = 1,
|
||||
DEBUG_LEVEL_WARN = 2,
|
||||
DEBUG_LEVEL_INFO = 3,
|
||||
DEBUG_LEVEL_DEBUG = 4,
|
||||
DEBUG_LEVEL_TRACE = 5
|
||||
} debug_level_t;
|
||||
|
||||
// Global debug level (set at runtime via CLI)
|
||||
extern debug_level_t g_debug_level;
|
||||
|
||||
// Initialize debug system
|
||||
void debug_init(int level);
|
||||
|
||||
// Core logging function
|
||||
void debug_log(debug_level_t level, const char* file, int line, const char* format, ...);
|
||||
|
||||
// Convenience macros that check level before calling
|
||||
// Note: TRACE level (5) and above include file:line information for ALL messages
|
||||
#define DEBUG_ERROR(...) \
|
||||
do { if (g_debug_level >= DEBUG_LEVEL_ERROR) debug_log(DEBUG_LEVEL_ERROR, __FILE__, __LINE__, __VA_ARGS__); } while(0)
|
||||
|
||||
#define DEBUG_WARN(...) \
|
||||
do { if (g_debug_level >= DEBUG_LEVEL_WARN) debug_log(DEBUG_LEVEL_WARN, __FILE__, __LINE__, __VA_ARGS__); } while(0)
|
||||
|
||||
#define DEBUG_INFO(...) \
|
||||
do { if (g_debug_level >= DEBUG_LEVEL_INFO) debug_log(DEBUG_LEVEL_INFO, __FILE__, __LINE__, __VA_ARGS__); } while(0)
|
||||
|
||||
#define DEBUG_LOG(...) \
|
||||
do { if (g_debug_level >= DEBUG_LEVEL_DEBUG) debug_log(DEBUG_LEVEL_DEBUG, __FILE__, __LINE__, __VA_ARGS__); } while(0)
|
||||
|
||||
#define DEBUG_TRACE(...) \
|
||||
do { if (g_debug_level >= DEBUG_LEVEL_TRACE) debug_log(DEBUG_LEVEL_TRACE, __FILE__, __LINE__, __VA_ARGS__); } while(0)
|
||||
|
||||
#endif /* DEBUG_H */
|
||||
@@ -28,6 +28,8 @@ static const struct {
|
||||
{"nip42_auth_required_subscriptions", "false"},
|
||||
{"nip42_auth_required_kinds", "4,14"}, // Default: DM kinds require auth
|
||||
{"nip42_challenge_expiration", "600"}, // 10 minutes
|
||||
{"nip42_challenge_timeout", "600"}, // Challenge timeout (seconds)
|
||||
{"nip42_time_tolerance", "300"}, // Time tolerance (seconds)
|
||||
|
||||
// NIP-70 Protected Events
|
||||
{"nip70_protected_events_enabled", "false"},
|
||||
@@ -63,6 +65,9 @@ static const struct {
|
||||
{"max_total_subscriptions", "5000"},
|
||||
{"max_filters_per_subscription", "10"},
|
||||
|
||||
// Connection Management
|
||||
{"max_connection_seconds", "86400"}, // 24 hours (0 = disabled)
|
||||
|
||||
// Event Processing Limits
|
||||
{"max_event_tags", "100"},
|
||||
{"max_content_length", "8196"},
|
||||
@@ -70,7 +75,19 @@ static const struct {
|
||||
|
||||
// Performance Settings
|
||||
{"default_limit", "500"},
|
||||
{"max_limit", "5000"}
|
||||
{"max_limit", "5000"},
|
||||
|
||||
// Proxy Settings
|
||||
// Trust proxy headers (X-Forwarded-For, X-Real-IP) for accurate client IP detection
|
||||
// Safe for informational/debugging use. Only becomes a security concern if you implement
|
||||
// IP-based rate limiting or access control (which would require firewall protection anyway)
|
||||
{"trust_proxy_headers", "true"},
|
||||
|
||||
// NIP-59 Gift Wrap Timestamp Configuration
|
||||
{"nip59_timestamp_max_delay_sec", "0"},
|
||||
|
||||
// Kind 1 Status Posts
|
||||
{"kind_1_status_posts_hours", "1"}
|
||||
};
|
||||
|
||||
// Number of default configuration values
|
||||
|
||||
BIN
src/default_config_event.h.gch
Normal file
1246
src/dm_admin.c
@@ -1,3 +1,5 @@
|
||||
// Note to assistants. dm_admin is only for functions relating to direct messaging
|
||||
|
||||
#ifndef DM_ADMIN_H
|
||||
#define DM_ADMIN_H
|
||||
|
||||
@@ -24,4 +26,11 @@ int send_nip17_response(const char* sender_pubkey, const char* response_content,
|
||||
char* generate_config_text(void);
|
||||
char* generate_stats_text(void);
|
||||
|
||||
// SQL query admin functions
|
||||
int validate_sql_query(const char* query, char* error_message, size_t error_size);
|
||||
char* execute_sql_query(const char* query, const char* request_id, char* error_message, size_t error_size);
|
||||
int handle_sql_query_unified(cJSON* event, const char* query, char* error_message, size_t error_size, struct lws* wsi);
|
||||
int send_admin_response(const char* sender_pubkey, const char* response_content, const char* request_id,
|
||||
char* error_message, size_t error_size, struct lws* wsi);
|
||||
|
||||
#endif // DM_ADMIN_H
|
||||
455
src/main.c
@@ -74,10 +74,6 @@ struct relay_info {
|
||||
char payments_url[RELAY_URL_MAX_LENGTH];
|
||||
};
|
||||
|
||||
// Global relay information instance moved to unified cache
|
||||
// static struct relay_info g_relay_info = {0}; // REMOVED - now in g_unified_cache.relay_info
|
||||
|
||||
|
||||
// NIP-40 Expiration configuration (now in nip040.c)
|
||||
extern struct expiration_config g_expiration_config;
|
||||
|
||||
@@ -92,12 +88,6 @@ extern subscription_manager_t g_subscription_manager;
|
||||
/////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
// Forward declarations for logging functions - REMOVED (replaced by debug system)
|
||||
|
||||
// Forward declaration for subscription manager configuration
|
||||
void update_subscription_manager_config(void);
|
||||
|
||||
@@ -105,7 +95,6 @@ void update_subscription_manager_config(void);
|
||||
void log_subscription_created(const subscription_t* sub);
|
||||
void log_subscription_closed(const char* sub_id, const char* client_ip, const char* reason);
|
||||
void log_subscription_disconnected(const char* client_ip);
|
||||
void log_event_broadcast(const char* event_id, const char* sub_id, const char* client_ip);
|
||||
void update_subscription_events_sent(const char* sub_id, int events_sent);
|
||||
|
||||
// Forward declarations for NIP-01 event handling
|
||||
@@ -158,6 +147,10 @@ int mark_event_as_deleted(const char* event_id, const char* deletion_event_id, c
|
||||
|
||||
// Forward declaration for database functions
|
||||
int store_event(cJSON* event);
|
||||
cJSON* retrieve_event(const char* event_id);
|
||||
|
||||
// Forward declaration for monitoring system
|
||||
void monitoring_on_event_stored(void);
|
||||
|
||||
// Forward declarations for NIP-11 relay information handling
|
||||
void init_relay_info();
|
||||
@@ -216,23 +209,21 @@ void signal_handler(int sig) {
|
||||
// Send NOTICE message to client (NIP-01)
|
||||
void send_notice_message(struct lws* wsi, const char* message) {
|
||||
if (!wsi || !message) return;
|
||||
|
||||
|
||||
cJSON* notice_msg = cJSON_CreateArray();
|
||||
cJSON_AddItemToArray(notice_msg, cJSON_CreateString("NOTICE"));
|
||||
cJSON_AddItemToArray(notice_msg, cJSON_CreateString(message));
|
||||
|
||||
|
||||
char* msg_str = cJSON_Print(notice_msg);
|
||||
if (msg_str) {
|
||||
size_t msg_len = strlen(msg_str);
|
||||
unsigned char* buf = malloc(LWS_PRE + msg_len);
|
||||
if (buf) {
|
||||
memcpy(buf + LWS_PRE, msg_str, msg_len);
|
||||
lws_write(wsi, buf + LWS_PRE, msg_len, LWS_WRITE_TEXT);
|
||||
free(buf);
|
||||
// Use proper message queue system instead of direct lws_write
|
||||
if (queue_message(wsi, NULL, msg_str, msg_len, LWS_WRITE_TEXT) != 0) {
|
||||
DEBUG_ERROR("Failed to queue NOTICE message");
|
||||
}
|
||||
free(msg_str);
|
||||
}
|
||||
|
||||
|
||||
cJSON_Delete(notice_msg);
|
||||
}
|
||||
|
||||
@@ -322,14 +313,35 @@ int init_database(const char* database_path_override) {
|
||||
if (g_debug_level >= DEBUG_LEVEL_DEBUG) {
|
||||
// Check config table row count immediately after database open
|
||||
sqlite3_stmt* stmt;
|
||||
if (sqlite3_prepare_v2(g_db, "SELECT COUNT(*) FROM config", -1, &stmt, NULL) == SQLITE_OK) {
|
||||
int rc = sqlite3_prepare_v2(g_db, "SELECT COUNT(*) FROM config", -1, &stmt, NULL);
|
||||
if (rc == SQLITE_OK) {
|
||||
if (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||
int row_count = sqlite3_column_int(stmt, 0);
|
||||
DEBUG_LOG("Config table row count immediately after sqlite3_open(): %d", row_count);
|
||||
}
|
||||
sqlite3_finalize(stmt);
|
||||
} else {
|
||||
DEBUG_LOG("Config table does not exist yet (first-time startup)");
|
||||
// Capture and log the actual SQLite error instead of assuming table doesn't exist
|
||||
const char* err_msg = sqlite3_errmsg(g_db);
|
||||
DEBUG_LOG("Failed to prepare config table query: %s (error code: %d)", err_msg, rc);
|
||||
|
||||
// Check if it's actually a missing table vs other error
|
||||
if (rc == SQLITE_ERROR) {
|
||||
// Try to check if config table exists
|
||||
sqlite3_stmt* check_stmt;
|
||||
int check_rc = sqlite3_prepare_v2(g_db, "SELECT name FROM sqlite_master WHERE type='table' AND name='config'", -1, &check_stmt, NULL);
|
||||
if (check_rc == SQLITE_OK) {
|
||||
int has_table = (sqlite3_step(check_stmt) == SQLITE_ROW);
|
||||
sqlite3_finalize(check_stmt);
|
||||
if (has_table) {
|
||||
DEBUG_LOG("Config table EXISTS but query failed - possible database corruption or locking issue");
|
||||
} else {
|
||||
DEBUG_LOG("Config table does not exist yet (first-time startup)");
|
||||
}
|
||||
} else {
|
||||
DEBUG_LOG("Failed to check table existence: %s (error code: %d)", sqlite3_errmsg(g_db), check_rc);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// DEBUG_GUARD_END
|
||||
@@ -576,93 +588,6 @@ const char* extract_d_tag_value(cJSON* tags) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
// Check and handle replaceable events according to NIP-01
|
||||
int check_and_handle_replaceable_event(int kind, const char* pubkey, long created_at) {
|
||||
if (!g_db || !pubkey) return 0;
|
||||
|
||||
const char* sql =
|
||||
"SELECT created_at FROM events WHERE kind = ? AND pubkey = ? ORDER BY created_at DESC LIMIT 1";
|
||||
|
||||
sqlite3_stmt* stmt;
|
||||
int rc = sqlite3_prepare_v2(g_db, sql, -1, &stmt, NULL);
|
||||
if (rc != SQLITE_OK) {
|
||||
return 0; // Allow storage on DB error
|
||||
}
|
||||
|
||||
sqlite3_bind_int(stmt, 1, kind);
|
||||
sqlite3_bind_text(stmt, 2, pubkey, -1, SQLITE_STATIC);
|
||||
|
||||
int result = 0;
|
||||
if (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||
long existing_created_at = sqlite3_column_int64(stmt, 0);
|
||||
if (created_at <= existing_created_at) {
|
||||
result = -1; // Older or same timestamp, reject
|
||||
} else {
|
||||
// Delete older versions
|
||||
const char* delete_sql = "DELETE FROM events WHERE kind = ? AND pubkey = ? AND created_at < ?";
|
||||
sqlite3_stmt* delete_stmt;
|
||||
if (sqlite3_prepare_v2(g_db, delete_sql, -1, &delete_stmt, NULL) == SQLITE_OK) {
|
||||
sqlite3_bind_int(delete_stmt, 1, kind);
|
||||
sqlite3_bind_text(delete_stmt, 2, pubkey, -1, SQLITE_STATIC);
|
||||
sqlite3_bind_int64(delete_stmt, 3, created_at);
|
||||
sqlite3_step(delete_stmt);
|
||||
sqlite3_finalize(delete_stmt);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sqlite3_finalize(stmt);
|
||||
return result;
|
||||
}
|
||||
|
||||
// Check and handle addressable events according to NIP-01
|
||||
int check_and_handle_addressable_event(int kind, const char* pubkey, const char* d_tag_value, long created_at) {
|
||||
if (!g_db || !pubkey) return 0;
|
||||
|
||||
// If no d tag, treat as regular replaceable
|
||||
if (!d_tag_value) {
|
||||
return check_and_handle_replaceable_event(kind, pubkey, created_at);
|
||||
}
|
||||
|
||||
const char* sql =
|
||||
"SELECT created_at FROM events WHERE kind = ? AND pubkey = ? AND json_extract(tags, '$[*][1]') = ? "
|
||||
"AND json_extract(tags, '$[*][0]') = 'd' ORDER BY created_at DESC LIMIT 1";
|
||||
|
||||
sqlite3_stmt* stmt;
|
||||
int rc = sqlite3_prepare_v2(g_db, sql, -1, &stmt, NULL);
|
||||
if (rc != SQLITE_OK) {
|
||||
return 0; // Allow storage on DB error
|
||||
}
|
||||
|
||||
sqlite3_bind_int(stmt, 1, kind);
|
||||
sqlite3_bind_text(stmt, 2, pubkey, -1, SQLITE_STATIC);
|
||||
sqlite3_bind_text(stmt, 3, d_tag_value, -1, SQLITE_STATIC);
|
||||
|
||||
int result = 0;
|
||||
if (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||
long existing_created_at = sqlite3_column_int64(stmt, 0);
|
||||
if (created_at <= existing_created_at) {
|
||||
result = -1; // Older or same timestamp, reject
|
||||
} else {
|
||||
// Delete older versions with same kind, pubkey, and d tag
|
||||
const char* delete_sql =
|
||||
"DELETE FROM events WHERE kind = ? AND pubkey = ? AND created_at < ? "
|
||||
"AND json_extract(tags, '$[*][1]') = ? AND json_extract(tags, '$[*][0]') = 'd'";
|
||||
sqlite3_stmt* delete_stmt;
|
||||
if (sqlite3_prepare_v2(g_db, delete_sql, -1, &delete_stmt, NULL) == SQLITE_OK) {
|
||||
sqlite3_bind_int(delete_stmt, 1, kind);
|
||||
sqlite3_bind_text(delete_stmt, 2, pubkey, -1, SQLITE_STATIC);
|
||||
sqlite3_bind_int64(delete_stmt, 3, created_at);
|
||||
sqlite3_bind_text(delete_stmt, 4, d_tag_value, -1, SQLITE_STATIC);
|
||||
sqlite3_step(delete_stmt);
|
||||
sqlite3_finalize(delete_stmt);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sqlite3_finalize(stmt);
|
||||
return result;
|
||||
}
|
||||
|
||||
// Store event in database
|
||||
int store_event(cJSON* event) {
|
||||
@@ -686,7 +611,14 @@ int store_event(cJSON* event) {
|
||||
|
||||
// Classify event type
|
||||
event_type_t type = classify_event_kind((int)cJSON_GetNumberValue(kind));
|
||||
|
||||
|
||||
// EPHEMERAL EVENTS (kinds 20000-29999) should NOT be stored
|
||||
if (type == EVENT_TYPE_EPHEMERAL) {
|
||||
DEBUG_LOG("Ephemeral event (kind %d) - broadcasting only, not storing",
|
||||
(int)cJSON_GetNumberValue(kind));
|
||||
return 0; // Success - event was handled but not stored
|
||||
}
|
||||
|
||||
// Serialize tags to JSON (use empty array if no tags)
|
||||
char* tags_json = NULL;
|
||||
if (tags && cJSON_IsArray(tags)) {
|
||||
@@ -725,11 +657,36 @@ int store_event(cJSON* event) {
|
||||
|
||||
// Execute statement
|
||||
rc = sqlite3_step(stmt);
|
||||
if (rc != SQLITE_DONE) {
|
||||
const char* err_msg = sqlite3_errmsg(g_db);
|
||||
int extended_errcode = sqlite3_extended_errcode(g_db);
|
||||
DEBUG_ERROR("INSERT failed: rc=%d, extended_errcode=%d, msg=%s", rc, extended_errcode, err_msg);
|
||||
}
|
||||
sqlite3_finalize(stmt);
|
||||
|
||||
if (rc != SQLITE_DONE) {
|
||||
if (rc == SQLITE_CONSTRAINT) {
|
||||
DEBUG_WARN("Event already exists in database");
|
||||
|
||||
// Add TRACE level debug to show both events
|
||||
if (g_debug_level >= DEBUG_LEVEL_TRACE) {
|
||||
// Get the existing event from database
|
||||
cJSON* existing_event = retrieve_event(cJSON_GetStringValue(id));
|
||||
if (existing_event) {
|
||||
char* existing_json = cJSON_Print(existing_event);
|
||||
DEBUG_TRACE("EXISTING EVENT: %s", existing_json ? existing_json : "NULL");
|
||||
free(existing_json);
|
||||
cJSON_Delete(existing_event);
|
||||
} else {
|
||||
DEBUG_TRACE("EXISTING EVENT: Could not retrieve existing event");
|
||||
}
|
||||
|
||||
// Show the event we're trying to insert
|
||||
char* new_json = cJSON_Print(event);
|
||||
DEBUG_TRACE("NEW EVENT: %s", new_json ? new_json : "NULL");
|
||||
free(new_json);
|
||||
}
|
||||
|
||||
free(tags_json);
|
||||
return 0; // Not an error, just duplicate
|
||||
}
|
||||
@@ -739,8 +696,12 @@ int store_event(cJSON* event) {
|
||||
free(tags_json);
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
||||
free(tags_json);
|
||||
|
||||
// Call monitoring hook after successful event storage
|
||||
monitoring_on_event_stored();
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
@@ -917,12 +878,11 @@ int handle_req_message(const char* sub_id, cJSON* filters, struct lws *wsi, stru
|
||||
char* msg_str = cJSON_Print(event_msg);
|
||||
if (msg_str) {
|
||||
size_t msg_len = strlen(msg_str);
|
||||
unsigned char* buf = malloc(LWS_PRE + msg_len);
|
||||
if (buf) {
|
||||
memcpy(buf + LWS_PRE, msg_str, msg_len);
|
||||
lws_write(wsi, buf + LWS_PRE, msg_len, LWS_WRITE_TEXT);
|
||||
// Use proper message queue system instead of direct lws_write
|
||||
if (queue_message(wsi, NULL, msg_str, msg_len, LWS_WRITE_TEXT) != 0) {
|
||||
DEBUG_ERROR("Failed to queue config EVENT message");
|
||||
} else {
|
||||
config_events_sent++;
|
||||
free(buf);
|
||||
}
|
||||
free(msg_str);
|
||||
}
|
||||
@@ -960,11 +920,9 @@ int handle_req_message(const char* sub_id, cJSON* filters, struct lws *wsi, stru
|
||||
char* closed_str = cJSON_Print(closed_msg);
|
||||
if (closed_str) {
|
||||
size_t closed_len = strlen(closed_str);
|
||||
unsigned char* buf = malloc(LWS_PRE + closed_len);
|
||||
if (buf) {
|
||||
memcpy(buf + LWS_PRE, closed_str, closed_len);
|
||||
lws_write(wsi, buf + LWS_PRE, closed_len, LWS_WRITE_TEXT);
|
||||
free(buf);
|
||||
// Use proper message queue system instead of direct lws_write
|
||||
if (queue_message(wsi, pss, closed_str, closed_len, LWS_WRITE_TEXT) != 0) {
|
||||
DEBUG_ERROR("Failed to queue CLOSED message");
|
||||
}
|
||||
free(closed_str);
|
||||
}
|
||||
@@ -1273,10 +1231,8 @@ int handle_req_message(const char* sub_id, cJSON* filters, struct lws *wsi, stru
|
||||
cJSON_AddItemToObject(event, "tags", tags);
|
||||
|
||||
// Check expiration filtering (NIP-40) at application level
|
||||
pthread_mutex_lock(&g_unified_cache.cache_lock);
|
||||
int expiration_enabled = g_unified_cache.expiration_config.enabled;
|
||||
int filter_responses = g_unified_cache.expiration_config.filter_responses;
|
||||
pthread_mutex_unlock(&g_unified_cache.cache_lock);
|
||||
int expiration_enabled = get_config_bool("expiration_enabled", 1);
|
||||
int filter_responses = get_config_bool("expiration_filter", 1);
|
||||
|
||||
if (expiration_enabled && filter_responses) {
|
||||
time_t current_time = time(NULL);
|
||||
@@ -1292,19 +1248,17 @@ int handle_req_message(const char* sub_id, cJSON* filters, struct lws *wsi, stru
|
||||
cJSON_AddItemToArray(event_msg, cJSON_CreateString("EVENT"));
|
||||
cJSON_AddItemToArray(event_msg, cJSON_CreateString(sub_id));
|
||||
cJSON_AddItemToArray(event_msg, event);
|
||||
|
||||
|
||||
char* msg_str = cJSON_Print(event_msg);
|
||||
if (msg_str) {
|
||||
size_t msg_len = strlen(msg_str);
|
||||
unsigned char* buf = malloc(LWS_PRE + msg_len);
|
||||
if (buf) {
|
||||
memcpy(buf + LWS_PRE, msg_str, msg_len);
|
||||
lws_write(wsi, buf + LWS_PRE, msg_len, LWS_WRITE_TEXT);
|
||||
free(buf);
|
||||
// Use proper message queue system instead of direct lws_write
|
||||
if (queue_message(wsi, pss, msg_str, msg_len, LWS_WRITE_TEXT) != 0) {
|
||||
DEBUG_ERROR("Failed to queue EVENT message for sub=%s", sub_id);
|
||||
}
|
||||
free(msg_str);
|
||||
}
|
||||
|
||||
|
||||
cJSON_Delete(event_msg);
|
||||
events_sent++;
|
||||
}
|
||||
@@ -1440,7 +1394,7 @@ void print_usage(const char* program_name) {
|
||||
printf("Options:\n");
|
||||
printf(" -h, --help Show this help message\n");
|
||||
printf(" -v, --version Show version information\n");
|
||||
printf(" -p, --port PORT Override relay port (first-time startup only)\n");
|
||||
printf(" -p, --port PORT Override relay port (first-time startup and existing relay restarts)\n");
|
||||
printf(" --strict-port Fail if exact port is unavailable (no port increment)\n");
|
||||
printf(" -a, --admin-pubkey KEY Override admin public key (64-char hex or npub)\n");
|
||||
printf(" -r, --relay-privkey KEY Override relay private key (64-char hex or nsec)\n");
|
||||
@@ -1450,13 +1404,14 @@ void print_usage(const char* program_name) {
|
||||
printf("Configuration:\n");
|
||||
printf(" This relay uses event-based configuration stored in the database.\n");
|
||||
printf(" On first startup, keys are automatically generated and printed once.\n");
|
||||
printf(" Command line options like --port only apply during first-time setup.\n");
|
||||
printf(" Command line options like --port apply during first-time setup and existing relay restarts.\n");
|
||||
printf(" After initial setup, all configuration is managed via database events.\n");
|
||||
printf(" Database file: <relay_pubkey>.db (created automatically)\n");
|
||||
printf("\n");
|
||||
printf("Port Binding:\n");
|
||||
printf(" Default: Try up to 10 consecutive ports if requested port is busy\n");
|
||||
printf(" --strict-port: Fail immediately if exact requested port is unavailable\n");
|
||||
printf(" --strict-port works with any custom port specified via -p or --port\n");
|
||||
printf("\n");
|
||||
printf("Examples:\n");
|
||||
printf(" %s # Start relay (auto-configure on first run)\n", program_name);
|
||||
@@ -1632,7 +1587,10 @@ int main(int argc, char* argv[]) {
|
||||
}
|
||||
|
||||
// Run first-time startup sequence (generates keys, sets up database path, but doesn't store private key yet)
|
||||
if (first_time_startup_sequence(&cli_options) != 0) {
|
||||
char admin_pubkey[65] = {0};
|
||||
char relay_pubkey[65] = {0};
|
||||
char relay_privkey[65] = {0};
|
||||
if (first_time_startup_sequence(&cli_options, admin_pubkey, relay_pubkey, relay_privkey) != 0) {
|
||||
DEBUG_ERROR("Failed to complete first-time startup sequence");
|
||||
cleanup_configuration_system();
|
||||
nostr_cleanup();
|
||||
@@ -1662,86 +1620,47 @@ int main(int argc, char* argv[]) {
|
||||
}
|
||||
// DEBUG_GUARD_END
|
||||
|
||||
// Now that database is available, populate the complete config table atomically
|
||||
// BUG FIX: Use the pubkeys returned from first_time_startup_sequence instead of trying to read from empty database
|
||||
DEBUG_LOG("Using pubkeys from first-time startup sequence for config population");
|
||||
DEBUG_LOG("admin_pubkey from startup: %s", admin_pubkey);
|
||||
DEBUG_LOG("relay_pubkey from startup: %s", relay_pubkey);
|
||||
|
||||
if (populate_all_config_values_atomic(admin_pubkey, relay_pubkey) != 0) {
|
||||
DEBUG_ERROR("Failed to populate complete config table");
|
||||
cleanup_configuration_system();
|
||||
nostr_cleanup();
|
||||
close_database();
|
||||
return 1;
|
||||
}
|
||||
|
||||
// Apply CLI overrides atomically (after complete config table exists)
|
||||
if (apply_cli_overrides_atomic(&cli_options) != 0) {
|
||||
DEBUG_ERROR("Failed to apply CLI overrides");
|
||||
cleanup_configuration_system();
|
||||
nostr_cleanup();
|
||||
close_database();
|
||||
return 1;
|
||||
}
|
||||
|
||||
// Now that database is available, store the relay private key securely
|
||||
const char* relay_privkey = get_temp_relay_private_key();
|
||||
if (relay_privkey) {
|
||||
if (relay_privkey[0] != '\0') {
|
||||
if (store_relay_private_key(relay_privkey) != 0) {
|
||||
DEBUG_ERROR("Failed to store relay private key securely after database initialization");
|
||||
cleanup_configuration_system();
|
||||
nostr_cleanup();
|
||||
close_database();
|
||||
return 1;
|
||||
}
|
||||
} else {
|
||||
DEBUG_ERROR("Relay private key not available from first-time startup");
|
||||
cleanup_configuration_system();
|
||||
nostr_cleanup();
|
||||
return 1;
|
||||
}
|
||||
|
||||
// COMMENTED OUT: Old incremental config building code replaced by unified startup sequence
|
||||
// The new first_time_startup_sequence() function handles all config creation atomically
|
||||
/*
|
||||
// Handle configuration setup after database is initialized
|
||||
// Always populate defaults directly in config table (abandoning legacy event signing)
|
||||
|
||||
// Populate default config values in table
|
||||
if (populate_default_config_values() != 0) {
|
||||
DEBUG_ERROR("Failed to populate default config values");
|
||||
cleanup_configuration_system();
|
||||
nostr_cleanup();
|
||||
close_database();
|
||||
return 1;
|
||||
}
|
||||
|
||||
// DEBUG_GUARD_START
|
||||
if (g_debug_level >= DEBUG_LEVEL_DEBUG) {
|
||||
sqlite3_stmt* stmt;
|
||||
if (sqlite3_prepare_v2(g_db, "SELECT COUNT(*) FROM config", -1, &stmt, NULL) == SQLITE_OK) {
|
||||
if (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||
int row_count = sqlite3_column_int(stmt, 0);
|
||||
DEBUG_LOG("Config table row count after populate_default_config_values(): %d", row_count);
|
||||
}
|
||||
sqlite3_finalize(stmt);
|
||||
}
|
||||
}
|
||||
// DEBUG_GUARD_END
|
||||
|
||||
// Apply CLI overrides now that database is available
|
||||
if (cli_options.port_override > 0) {
|
||||
char port_str[16];
|
||||
snprintf(port_str, sizeof(port_str), "%d", cli_options.port_override);
|
||||
if (update_config_in_table("relay_port", port_str) != 0) {
|
||||
DEBUG_ERROR("Failed to update relay port override in config table");
|
||||
cleanup_configuration_system();
|
||||
nostr_cleanup();
|
||||
close_database();
|
||||
return 1;
|
||||
}
|
||||
printf(" Port: %d (overriding default)\n", cli_options.port_override);
|
||||
}
|
||||
|
||||
// Add pubkeys to config table (single authoritative call)
|
||||
if (add_pubkeys_to_config_table() != 0) {
|
||||
DEBUG_ERROR("Failed to add pubkeys to config table");
|
||||
cleanup_configuration_system();
|
||||
nostr_cleanup();
|
||||
close_database();
|
||||
return 1;
|
||||
}
|
||||
|
||||
// DEBUG_GUARD_START
|
||||
if (g_debug_level >= DEBUG_LEVEL_DEBUG) {
|
||||
sqlite3_stmt* stmt;
|
||||
if (sqlite3_prepare_v2(g_db, "SELECT COUNT(*) FROM config", -1, &stmt, NULL) == SQLITE_OK) {
|
||||
if (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||
int row_count = sqlite3_column_int(stmt, 0);
|
||||
DEBUG_LOG("Config table row count after add_pubkeys_to_config_table() (first-time): %d", row_count);
|
||||
}
|
||||
sqlite3_finalize(stmt);
|
||||
}
|
||||
}
|
||||
// DEBUG_GUARD_END
|
||||
*/
|
||||
} else {
|
||||
// Find existing database file
|
||||
char** existing_files = find_existing_db_files();
|
||||
@@ -1776,7 +1695,7 @@ int main(int argc, char* argv[]) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
// Setup existing relay (sets database path and loads config)
|
||||
// Setup existing relay FIRST (sets database path)
|
||||
if (startup_existing_relay(relay_pubkey, &cli_options) != 0) {
|
||||
DEBUG_ERROR("Failed to setup existing relay");
|
||||
cleanup_configuration_system();
|
||||
@@ -1789,23 +1708,7 @@ int main(int argc, char* argv[]) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
// Check config table row count before database initialization
|
||||
{
|
||||
sqlite3* temp_db = NULL;
|
||||
if (sqlite3_open(g_database_path, &temp_db) == SQLITE_OK) {
|
||||
sqlite3_stmt* stmt;
|
||||
if (sqlite3_prepare_v2(temp_db, "SELECT COUNT(*) FROM config", -1, &stmt, NULL) == SQLITE_OK) {
|
||||
if (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||
int row_count = sqlite3_column_int(stmt, 0);
|
||||
printf(" Config table row count before database initialization: %d\n", row_count);
|
||||
}
|
||||
sqlite3_finalize(stmt);
|
||||
}
|
||||
sqlite3_close(temp_db);
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize database with existing database path
|
||||
// Initialize database with the database path set by startup_existing_relay()
|
||||
DEBUG_TRACE("Initializing existing database");
|
||||
if (init_database(g_database_path) != 0) {
|
||||
DEBUG_ERROR("Failed to initialize existing database");
|
||||
@@ -1820,6 +1723,20 @@ int main(int argc, char* argv[]) {
|
||||
}
|
||||
DEBUG_LOG("Existing database initialized");
|
||||
|
||||
// Apply CLI overrides atomically (now that database is initialized)
|
||||
if (apply_cli_overrides_atomic(&cli_options) != 0) {
|
||||
DEBUG_ERROR("Failed to apply CLI overrides for existing relay");
|
||||
cleanup_configuration_system();
|
||||
free(relay_pubkey);
|
||||
for (int i = 0; existing_files[i]; i++) {
|
||||
free(existing_files[i]);
|
||||
}
|
||||
free(existing_files);
|
||||
nostr_cleanup();
|
||||
close_database();
|
||||
return 1;
|
||||
}
|
||||
|
||||
// DEBUG_GUARD_START
|
||||
if (g_debug_level >= DEBUG_LEVEL_DEBUG) {
|
||||
sqlite3_stmt* stmt;
|
||||
@@ -1831,103 +1748,7 @@ int main(int argc, char* argv[]) {
|
||||
sqlite3_finalize(stmt);
|
||||
}
|
||||
}
|
||||
// DEBUG_GUARD_END
|
||||
|
||||
// COMMENTED OUT: Old incremental config building code replaced by unified startup sequence
|
||||
// The new startup_existing_relay() function handles all config loading atomically
|
||||
/*
|
||||
// Ensure default configuration values are populated (for any missing keys)
|
||||
// This must be done AFTER database initialization
|
||||
// COMMENTED OUT: Don't modify existing database config on restart
|
||||
// if (populate_default_config_values() != 0) {
|
||||
// DEBUG_WARN("Failed to populate default config values for existing relay - continuing");
|
||||
// }
|
||||
|
||||
// Load configuration from database
|
||||
cJSON* config_event = load_config_event_from_database(relay_pubkey);
|
||||
if (config_event) {
|
||||
if (apply_configuration_from_event(config_event) != 0) {
|
||||
DEBUG_WARN("Failed to apply configuration from database");
|
||||
}
|
||||
cJSON_Delete(config_event);
|
||||
} else {
|
||||
// This is expected for relays using table-based configuration
|
||||
// No longer a warning - just informational
|
||||
}
|
||||
|
||||
// DEBUG_GUARD_START
|
||||
if (g_debug_level >= DEBUG_LEVEL_DEBUG) {
|
||||
sqlite3_stmt* stmt;
|
||||
if (sqlite3_prepare_v2(g_db, "SELECT COUNT(*) FROM config", -1, &stmt, NULL) == SQLITE_OK) {
|
||||
if (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||
int row_count = sqlite3_column_int(stmt, 0);
|
||||
DEBUG_LOG("Config table row count before checking pubkeys: %d", row_count);
|
||||
}
|
||||
sqlite3_finalize(stmt);
|
||||
}
|
||||
}
|
||||
// DEBUG_GUARD_END
|
||||
|
||||
// Ensure pubkeys are in config table for existing relay
|
||||
// This handles migration from old event-based config to table-based config
|
||||
const char* admin_pubkey_from_table = get_config_value_from_table("admin_pubkey");
|
||||
const char* relay_pubkey_from_table = get_config_value_from_table("relay_pubkey");
|
||||
|
||||
int need_to_add_pubkeys = 0;
|
||||
|
||||
// Check if admin_pubkey is missing or invalid
|
||||
if (!admin_pubkey_from_table || strlen(admin_pubkey_from_table) != 64) {
|
||||
DEBUG_WARN("Admin pubkey missing or invalid in config table - will regenerate from cache");
|
||||
need_to_add_pubkeys = 1;
|
||||
}
|
||||
if (admin_pubkey_from_table) free((char*)admin_pubkey_from_table);
|
||||
|
||||
// Check if relay_pubkey is missing or invalid
|
||||
if (!relay_pubkey_from_table || strlen(relay_pubkey_from_table) != 64) {
|
||||
DEBUG_WARN("Relay pubkey missing or invalid in config table - will regenerate from cache");
|
||||
need_to_add_pubkeys = 1;
|
||||
}
|
||||
if (relay_pubkey_from_table) free((char*)relay_pubkey_from_table);
|
||||
|
||||
// If either pubkey is missing, call add_pubkeys_to_config_table to populate both
|
||||
if (need_to_add_pubkeys) {
|
||||
if (add_pubkeys_to_config_table() != 0) {
|
||||
DEBUG_ERROR("Failed to add pubkeys to config table for existing relay");
|
||||
cleanup_configuration_system();
|
||||
nostr_cleanup();
|
||||
close_database();
|
||||
return 1;
|
||||
}
|
||||
|
||||
// DEBUG_GUARD_START
|
||||
if (g_debug_level >= DEBUG_LEVEL_DEBUG) {
|
||||
sqlite3_stmt* stmt;
|
||||
if (sqlite3_prepare_v2(g_db, "SELECT COUNT(*) FROM config", -1, &stmt, NULL) == SQLITE_OK) {
|
||||
if (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||
int row_count = sqlite3_column_int(stmt, 0);
|
||||
DEBUG_LOG("Config table row count after add_pubkeys_to_config_table(): %d", row_count);
|
||||
}
|
||||
sqlite3_finalize(stmt);
|
||||
}
|
||||
}
|
||||
// DEBUG_GUARD_END
|
||||
}
|
||||
|
||||
// Apply CLI overrides for existing relay (port override should work even for existing relays)
|
||||
if (cli_options.port_override > 0) {
|
||||
char port_str[16];
|
||||
snprintf(port_str, sizeof(port_str), "%d", cli_options.port_override);
|
||||
if (update_config_in_table("relay_port", port_str) != 0) {
|
||||
DEBUG_ERROR("Failed to update relay port override in config table for existing relay");
|
||||
cleanup_configuration_system();
|
||||
nostr_cleanup();
|
||||
close_database();
|
||||
return 1;
|
||||
}
|
||||
printf(" Port: %d (overriding configured port)\n", cli_options.port_override);
|
||||
}
|
||||
*/
|
||||
|
||||
|
||||
// Free memory
|
||||
free(relay_pubkey);
|
||||
for (int i = 0; existing_files[i]; i++) {
|
||||
@@ -1964,6 +1785,7 @@ int main(int argc, char* argv[]) {
|
||||
|
||||
// Initialize NIP-40 expiration configuration
|
||||
init_expiration_config();
|
||||
|
||||
// Update subscription manager configuration
|
||||
update_subscription_manager_config();
|
||||
|
||||
@@ -1985,10 +1807,11 @@ int main(int argc, char* argv[]) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
// Cleanup orphaned subscriptions from previous runs
|
||||
cleanup_all_subscriptions_on_startup();
|
||||
|
||||
|
||||
// Start WebSocket Nostr relay server (port from configuration)
|
||||
int result = start_websocket_relay(-1, cli_options.strict_port); // Let config system determine port, pass strict_port flag
|
||||
// Start WebSocket Nostr relay server (port from CLI override or configuration)
|
||||
int result = start_websocket_relay(cli_options.port_override, cli_options.strict_port); // Use CLI port override if specified, otherwise config
|
||||
|
||||
// Cleanup
|
||||
cleanup_relay_info();
|
||||
|
||||
12
src/main.h
@@ -10,10 +10,14 @@
|
||||
#define MAIN_H
|
||||
|
||||
// Version information (auto-updated by build system)
|
||||
#define VERSION "v0.4.6"
|
||||
#define VERSION_MAJOR 0
|
||||
#define VERSION_MINOR 4
|
||||
#define VERSION_PATCH 6
|
||||
#define VERSION_MAJOR 1
|
||||
#define VERSION_MINOR 0
|
||||
#define VERSION_PATCH 8
|
||||
#define VERSION "v1.0.8"
|
||||
|
||||
// Avoid VERSION_MAJOR redefinition warning from nostr_core_lib
|
||||
#undef VERSION_MAJOR
|
||||
#define VERSION_MAJOR 1
|
||||
|
||||
// Relay metadata (authoritative source for NIP-11 information)
|
||||
#define RELAY_NAME "C-Relay"
|
||||
|
||||
556
src/nip011.c
@@ -15,8 +15,7 @@ const char* get_config_value(const char* key);
|
||||
int get_config_int(const char* key, int default_value);
|
||||
int get_config_bool(const char* key, int default_value);
|
||||
|
||||
// Forward declarations for global cache access
|
||||
extern unified_config_cache_t g_unified_cache;
|
||||
// NIP-11 relay information is now managed directly from config table
|
||||
|
||||
// Forward declarations for constants (defined in config.h and other headers)
|
||||
#define HTTP_STATUS_OK 200
|
||||
@@ -75,185 +74,14 @@ cJSON* parse_comma_separated_array(const char* csv_string) {
|
||||
|
||||
// Initialize relay information using configuration system
|
||||
void init_relay_info() {
|
||||
// Get all config values first (without holding mutex to avoid deadlock)
|
||||
// Note: These may be dynamically allocated strings that need to be freed
|
||||
const char* relay_name = get_config_value("relay_name");
|
||||
const char* relay_description = get_config_value("relay_description");
|
||||
const char* relay_software = get_config_value("relay_software");
|
||||
const char* relay_version = get_config_value("relay_version");
|
||||
const char* relay_contact = get_config_value("relay_contact");
|
||||
const char* relay_pubkey = get_config_value("relay_pubkey");
|
||||
const char* supported_nips_csv = get_config_value("supported_nips");
|
||||
const char* language_tags_csv = get_config_value("language_tags");
|
||||
const char* relay_countries_csv = get_config_value("relay_countries");
|
||||
const char* posting_policy = get_config_value("posting_policy");
|
||||
const char* payments_url = get_config_value("payments_url");
|
||||
|
||||
// Get config values for limitations
|
||||
int max_message_length = get_config_int("max_message_length", 16384);
|
||||
int max_subscriptions_per_client = get_config_int("max_subscriptions_per_client", 20);
|
||||
int max_limit = get_config_int("max_limit", 5000);
|
||||
int max_event_tags = get_config_int("max_event_tags", 100);
|
||||
int max_content_length = get_config_int("max_content_length", 8196);
|
||||
int default_limit = get_config_int("default_limit", 500);
|
||||
int admin_enabled = get_config_bool("admin_enabled", 0);
|
||||
|
||||
pthread_mutex_lock(&g_unified_cache.cache_lock);
|
||||
|
||||
// Update relay information fields
|
||||
if (relay_name) {
|
||||
strncpy(g_unified_cache.relay_info.name, relay_name, sizeof(g_unified_cache.relay_info.name) - 1);
|
||||
free((char*)relay_name); // Free dynamically allocated string
|
||||
} else {
|
||||
strncpy(g_unified_cache.relay_info.name, "C Nostr Relay", sizeof(g_unified_cache.relay_info.name) - 1);
|
||||
}
|
||||
|
||||
if (relay_description) {
|
||||
strncpy(g_unified_cache.relay_info.description, relay_description, sizeof(g_unified_cache.relay_info.description) - 1);
|
||||
free((char*)relay_description); // Free dynamically allocated string
|
||||
} else {
|
||||
strncpy(g_unified_cache.relay_info.description, "A high-performance Nostr relay implemented in C with SQLite storage", sizeof(g_unified_cache.relay_info.description) - 1);
|
||||
}
|
||||
|
||||
if (relay_software) {
|
||||
strncpy(g_unified_cache.relay_info.software, relay_software, sizeof(g_unified_cache.relay_info.software) - 1);
|
||||
free((char*)relay_software); // Free dynamically allocated string
|
||||
} else {
|
||||
strncpy(g_unified_cache.relay_info.software, "https://git.laantungir.net/laantungir/c-relay.git", sizeof(g_unified_cache.relay_info.software) - 1);
|
||||
}
|
||||
|
||||
if (relay_version) {
|
||||
strncpy(g_unified_cache.relay_info.version, relay_version, sizeof(g_unified_cache.relay_info.version) - 1);
|
||||
free((char*)relay_version); // Free dynamically allocated string
|
||||
} else {
|
||||
strncpy(g_unified_cache.relay_info.version, "0.2.0", sizeof(g_unified_cache.relay_info.version) - 1);
|
||||
}
|
||||
|
||||
if (relay_contact) {
|
||||
strncpy(g_unified_cache.relay_info.contact, relay_contact, sizeof(g_unified_cache.relay_info.contact) - 1);
|
||||
free((char*)relay_contact); // Free dynamically allocated string
|
||||
}
|
||||
|
||||
if (relay_pubkey) {
|
||||
strncpy(g_unified_cache.relay_info.pubkey, relay_pubkey, sizeof(g_unified_cache.relay_info.pubkey) - 1);
|
||||
free((char*)relay_pubkey); // Free dynamically allocated string
|
||||
}
|
||||
|
||||
if (posting_policy) {
|
||||
strncpy(g_unified_cache.relay_info.posting_policy, posting_policy, sizeof(g_unified_cache.relay_info.posting_policy) - 1);
|
||||
free((char*)posting_policy); // Free dynamically allocated string
|
||||
}
|
||||
|
||||
if (payments_url) {
|
||||
strncpy(g_unified_cache.relay_info.payments_url, payments_url, sizeof(g_unified_cache.relay_info.payments_url) - 1);
|
||||
free((char*)payments_url); // Free dynamically allocated string
|
||||
}
|
||||
|
||||
// Initialize supported NIPs array from config
|
||||
if (supported_nips_csv) {
|
||||
g_unified_cache.relay_info.supported_nips = parse_comma_separated_array(supported_nips_csv);
|
||||
free((char*)supported_nips_csv); // Free dynamically allocated string
|
||||
} else {
|
||||
// Fallback to default supported NIPs
|
||||
g_unified_cache.relay_info.supported_nips = cJSON_CreateArray();
|
||||
if (g_unified_cache.relay_info.supported_nips) {
|
||||
cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(1)); // NIP-01: Basic protocol
|
||||
cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(9)); // NIP-09: Event deletion
|
||||
cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(11)); // NIP-11: Relay information
|
||||
cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(13)); // NIP-13: Proof of Work
|
||||
cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(15)); // NIP-15: EOSE
|
||||
cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(20)); // NIP-20: Command results
|
||||
cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(40)); // NIP-40: Expiration Timestamp
|
||||
cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(42)); // NIP-42: Authentication
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize server limitations using configuration
|
||||
g_unified_cache.relay_info.limitation = cJSON_CreateObject();
|
||||
if (g_unified_cache.relay_info.limitation) {
|
||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "max_message_length", max_message_length);
|
||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "max_subscriptions", max_subscriptions_per_client);
|
||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "max_limit", max_limit);
|
||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "max_subid_length", SUBSCRIPTION_ID_MAX_LENGTH);
|
||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "max_event_tags", max_event_tags);
|
||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "max_content_length", max_content_length);
|
||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "min_pow_difficulty", g_unified_cache.pow_config.min_pow_difficulty);
|
||||
cJSON_AddBoolToObject(g_unified_cache.relay_info.limitation, "auth_required", admin_enabled ? cJSON_True : cJSON_False);
|
||||
cJSON_AddBoolToObject(g_unified_cache.relay_info.limitation, "payment_required", cJSON_False);
|
||||
cJSON_AddBoolToObject(g_unified_cache.relay_info.limitation, "restricted_writes", cJSON_False);
|
||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "created_at_lower_limit", 0);
|
||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "created_at_upper_limit", 2147483647);
|
||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "default_limit", default_limit);
|
||||
}
|
||||
|
||||
// Initialize empty retention policies (can be configured later)
|
||||
g_unified_cache.relay_info.retention = cJSON_CreateArray();
|
||||
|
||||
// Initialize language tags from config
|
||||
if (language_tags_csv) {
|
||||
g_unified_cache.relay_info.language_tags = parse_comma_separated_array(language_tags_csv);
|
||||
free((char*)language_tags_csv); // Free dynamically allocated string
|
||||
} else {
|
||||
// Fallback to global
|
||||
g_unified_cache.relay_info.language_tags = cJSON_CreateArray();
|
||||
if (g_unified_cache.relay_info.language_tags) {
|
||||
cJSON_AddItemToArray(g_unified_cache.relay_info.language_tags, cJSON_CreateString("*"));
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize relay countries from config
|
||||
if (relay_countries_csv) {
|
||||
g_unified_cache.relay_info.relay_countries = parse_comma_separated_array(relay_countries_csv);
|
||||
free((char*)relay_countries_csv); // Free dynamically allocated string
|
||||
} else {
|
||||
// Fallback to global
|
||||
g_unified_cache.relay_info.relay_countries = cJSON_CreateArray();
|
||||
if (g_unified_cache.relay_info.relay_countries) {
|
||||
cJSON_AddItemToArray(g_unified_cache.relay_info.relay_countries, cJSON_CreateString("*"));
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize content tags as empty array
|
||||
g_unified_cache.relay_info.tags = cJSON_CreateArray();
|
||||
|
||||
// Initialize fees as empty object (no payment required by default)
|
||||
g_unified_cache.relay_info.fees = cJSON_CreateObject();
|
||||
|
||||
pthread_mutex_unlock(&g_unified_cache.cache_lock);
|
||||
// NIP-11 relay information is now generated dynamically from config table
|
||||
// No initialization needed - data is fetched directly from database when requested
|
||||
}
|
||||
|
||||
// Clean up relay information JSON objects
|
||||
void cleanup_relay_info() {
|
||||
pthread_mutex_lock(&g_unified_cache.cache_lock);
|
||||
if (g_unified_cache.relay_info.supported_nips) {
|
||||
cJSON_Delete(g_unified_cache.relay_info.supported_nips);
|
||||
g_unified_cache.relay_info.supported_nips = NULL;
|
||||
}
|
||||
if (g_unified_cache.relay_info.limitation) {
|
||||
cJSON_Delete(g_unified_cache.relay_info.limitation);
|
||||
g_unified_cache.relay_info.limitation = NULL;
|
||||
}
|
||||
if (g_unified_cache.relay_info.retention) {
|
||||
cJSON_Delete(g_unified_cache.relay_info.retention);
|
||||
g_unified_cache.relay_info.retention = NULL;
|
||||
}
|
||||
if (g_unified_cache.relay_info.language_tags) {
|
||||
cJSON_Delete(g_unified_cache.relay_info.language_tags);
|
||||
g_unified_cache.relay_info.language_tags = NULL;
|
||||
}
|
||||
if (g_unified_cache.relay_info.relay_countries) {
|
||||
cJSON_Delete(g_unified_cache.relay_info.relay_countries);
|
||||
g_unified_cache.relay_info.relay_countries = NULL;
|
||||
}
|
||||
if (g_unified_cache.relay_info.tags) {
|
||||
cJSON_Delete(g_unified_cache.relay_info.tags);
|
||||
g_unified_cache.relay_info.tags = NULL;
|
||||
}
|
||||
if (g_unified_cache.relay_info.fees) {
|
||||
cJSON_Delete(g_unified_cache.relay_info.fees);
|
||||
g_unified_cache.relay_info.fees = NULL;
|
||||
}
|
||||
pthread_mutex_unlock(&g_unified_cache.cache_lock);
|
||||
// NIP-11 relay information is now generated dynamically from config table
|
||||
// No cleanup needed - data is fetched directly from database when requested
|
||||
}
|
||||
|
||||
// Generate NIP-11 compliant JSON document
|
||||
@@ -264,248 +92,194 @@ cJSON* generate_relay_info_json() {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
pthread_mutex_lock(&g_unified_cache.cache_lock);
|
||||
// Get all config values directly from database
|
||||
const char* relay_name = get_config_value("relay_name");
|
||||
const char* relay_description = get_config_value("relay_description");
|
||||
const char* relay_banner = get_config_value("relay_banner");
|
||||
const char* relay_icon = get_config_value("relay_icon");
|
||||
const char* relay_pubkey = get_config_value("relay_pubkey");
|
||||
const char* relay_contact = get_config_value("relay_contact");
|
||||
const char* supported_nips_csv = get_config_value("supported_nips");
|
||||
const char* relay_software = get_config_value("relay_software");
|
||||
const char* relay_version = get_config_value("relay_version");
|
||||
const char* privacy_policy = get_config_value("privacy_policy");
|
||||
const char* terms_of_service = get_config_value("terms_of_service");
|
||||
const char* posting_policy = get_config_value("posting_policy");
|
||||
const char* language_tags_csv = get_config_value("language_tags");
|
||||
const char* relay_countries_csv = get_config_value("relay_countries");
|
||||
const char* payments_url = get_config_value("payments_url");
|
||||
|
||||
// Defensive reinit: if relay_info appears empty (cache refresh wiped it), rebuild it directly from table
|
||||
if (strlen(g_unified_cache.relay_info.name) == 0 &&
|
||||
strlen(g_unified_cache.relay_info.description) == 0 &&
|
||||
strlen(g_unified_cache.relay_info.software) == 0) {
|
||||
DEBUG_WARN("NIP-11 relay_info appears empty, rebuilding directly from config table");
|
||||
|
||||
// Rebuild relay_info directly from config table to avoid circular cache dependency
|
||||
// Get values directly from table (similar to init_relay_info but without cache calls)
|
||||
const char* relay_name = get_config_value_from_table("relay_name");
|
||||
if (relay_name) {
|
||||
strncpy(g_unified_cache.relay_info.name, relay_name, sizeof(g_unified_cache.relay_info.name) - 1);
|
||||
free((char*)relay_name);
|
||||
} else {
|
||||
strncpy(g_unified_cache.relay_info.name, "C Nostr Relay", sizeof(g_unified_cache.relay_info.name) - 1);
|
||||
}
|
||||
|
||||
const char* relay_description = get_config_value_from_table("relay_description");
|
||||
if (relay_description) {
|
||||
strncpy(g_unified_cache.relay_info.description, relay_description, sizeof(g_unified_cache.relay_info.description) - 1);
|
||||
free((char*)relay_description);
|
||||
} else {
|
||||
strncpy(g_unified_cache.relay_info.description, "A high-performance Nostr relay implemented in C with SQLite storage", sizeof(g_unified_cache.relay_info.description) - 1);
|
||||
}
|
||||
|
||||
const char* relay_software = get_config_value_from_table("relay_software");
|
||||
if (relay_software) {
|
||||
strncpy(g_unified_cache.relay_info.software, relay_software, sizeof(g_unified_cache.relay_info.software) - 1);
|
||||
free((char*)relay_software);
|
||||
} else {
|
||||
strncpy(g_unified_cache.relay_info.software, "https://git.laantungir.net/laantungir/c-relay.git", sizeof(g_unified_cache.relay_info.software) - 1);
|
||||
}
|
||||
|
||||
const char* relay_version = get_config_value_from_table("relay_version");
|
||||
if (relay_version) {
|
||||
strncpy(g_unified_cache.relay_info.version, relay_version, sizeof(g_unified_cache.relay_info.version) - 1);
|
||||
free((char*)relay_version);
|
||||
} else {
|
||||
strncpy(g_unified_cache.relay_info.version, "0.2.0", sizeof(g_unified_cache.relay_info.version) - 1);
|
||||
}
|
||||
|
||||
const char* relay_contact = get_config_value_from_table("relay_contact");
|
||||
if (relay_contact) {
|
||||
strncpy(g_unified_cache.relay_info.contact, relay_contact, sizeof(g_unified_cache.relay_info.contact) - 1);
|
||||
free((char*)relay_contact);
|
||||
}
|
||||
|
||||
const char* relay_pubkey = get_config_value_from_table("relay_pubkey");
|
||||
if (relay_pubkey) {
|
||||
strncpy(g_unified_cache.relay_info.pubkey, relay_pubkey, sizeof(g_unified_cache.relay_info.pubkey) - 1);
|
||||
free((char*)relay_pubkey);
|
||||
}
|
||||
|
||||
const char* posting_policy = get_config_value_from_table("posting_policy");
|
||||
if (posting_policy) {
|
||||
strncpy(g_unified_cache.relay_info.posting_policy, posting_policy, sizeof(g_unified_cache.relay_info.posting_policy) - 1);
|
||||
free((char*)posting_policy);
|
||||
}
|
||||
|
||||
const char* payments_url = get_config_value_from_table("payments_url");
|
||||
if (payments_url) {
|
||||
strncpy(g_unified_cache.relay_info.payments_url, payments_url, sizeof(g_unified_cache.relay_info.payments_url) - 1);
|
||||
free((char*)payments_url);
|
||||
}
|
||||
|
||||
// Rebuild supported_nips array
|
||||
const char* supported_nips_csv = get_config_value_from_table("supported_nips");
|
||||
if (supported_nips_csv) {
|
||||
g_unified_cache.relay_info.supported_nips = parse_comma_separated_array(supported_nips_csv);
|
||||
free((char*)supported_nips_csv);
|
||||
} else {
|
||||
g_unified_cache.relay_info.supported_nips = cJSON_CreateArray();
|
||||
if (g_unified_cache.relay_info.supported_nips) {
|
||||
cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(1));
|
||||
cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(9));
|
||||
cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(11));
|
||||
cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(13));
|
||||
cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(15));
|
||||
cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(20));
|
||||
cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(40));
|
||||
cJSON_AddItemToArray(g_unified_cache.relay_info.supported_nips, cJSON_CreateNumber(42));
|
||||
}
|
||||
}
|
||||
|
||||
// Rebuild limitation object
|
||||
int max_message_length = 16384;
|
||||
const char* max_msg_str = get_config_value_from_table("max_message_length");
|
||||
if (max_msg_str) {
|
||||
max_message_length = atoi(max_msg_str);
|
||||
free((char*)max_msg_str);
|
||||
}
|
||||
|
||||
int max_subscriptions_per_client = 20;
|
||||
const char* max_subs_str = get_config_value_from_table("max_subscriptions_per_client");
|
||||
if (max_subs_str) {
|
||||
max_subscriptions_per_client = atoi(max_subs_str);
|
||||
free((char*)max_subs_str);
|
||||
}
|
||||
|
||||
int max_limit = 5000;
|
||||
const char* max_limit_str = get_config_value_from_table("max_limit");
|
||||
if (max_limit_str) {
|
||||
max_limit = atoi(max_limit_str);
|
||||
free((char*)max_limit_str);
|
||||
}
|
||||
|
||||
int max_event_tags = 100;
|
||||
const char* max_tags_str = get_config_value_from_table("max_event_tags");
|
||||
if (max_tags_str) {
|
||||
max_event_tags = atoi(max_tags_str);
|
||||
free((char*)max_tags_str);
|
||||
}
|
||||
|
||||
int max_content_length = 8196;
|
||||
const char* max_content_str = get_config_value_from_table("max_content_length");
|
||||
if (max_content_str) {
|
||||
max_content_length = atoi(max_content_str);
|
||||
free((char*)max_content_str);
|
||||
}
|
||||
|
||||
int default_limit = 500;
|
||||
const char* default_limit_str = get_config_value_from_table("default_limit");
|
||||
if (default_limit_str) {
|
||||
default_limit = atoi(default_limit_str);
|
||||
free((char*)default_limit_str);
|
||||
}
|
||||
|
||||
int admin_enabled = 0;
|
||||
const char* admin_enabled_str = get_config_value_from_table("admin_enabled");
|
||||
if (admin_enabled_str) {
|
||||
admin_enabled = (strcmp(admin_enabled_str, "true") == 0) ? 1 : 0;
|
||||
free((char*)admin_enabled_str);
|
||||
}
|
||||
|
||||
g_unified_cache.relay_info.limitation = cJSON_CreateObject();
|
||||
if (g_unified_cache.relay_info.limitation) {
|
||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "max_message_length", max_message_length);
|
||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "max_subscriptions", max_subscriptions_per_client);
|
||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "max_limit", max_limit);
|
||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "max_subid_length", SUBSCRIPTION_ID_MAX_LENGTH);
|
||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "max_event_tags", max_event_tags);
|
||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "max_content_length", max_content_length);
|
||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "min_pow_difficulty", g_unified_cache.pow_config.min_pow_difficulty);
|
||||
cJSON_AddBoolToObject(g_unified_cache.relay_info.limitation, "auth_required", admin_enabled ? cJSON_True : cJSON_False);
|
||||
cJSON_AddBoolToObject(g_unified_cache.relay_info.limitation, "payment_required", cJSON_False);
|
||||
cJSON_AddBoolToObject(g_unified_cache.relay_info.limitation, "restricted_writes", cJSON_False);
|
||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "created_at_lower_limit", 0);
|
||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "created_at_upper_limit", 2147483647);
|
||||
cJSON_AddNumberToObject(g_unified_cache.relay_info.limitation, "default_limit", default_limit);
|
||||
}
|
||||
|
||||
// Rebuild other arrays (empty for now)
|
||||
g_unified_cache.relay_info.retention = cJSON_CreateArray();
|
||||
g_unified_cache.relay_info.language_tags = cJSON_CreateArray();
|
||||
if (g_unified_cache.relay_info.language_tags) {
|
||||
cJSON_AddItemToArray(g_unified_cache.relay_info.language_tags, cJSON_CreateString("*"));
|
||||
}
|
||||
g_unified_cache.relay_info.relay_countries = cJSON_CreateArray();
|
||||
if (g_unified_cache.relay_info.relay_countries) {
|
||||
cJSON_AddItemToArray(g_unified_cache.relay_info.relay_countries, cJSON_CreateString("*"));
|
||||
}
|
||||
g_unified_cache.relay_info.tags = cJSON_CreateArray();
|
||||
g_unified_cache.relay_info.fees = cJSON_CreateObject();
|
||||
|
||||
}
|
||||
// Get config values for limitations
|
||||
int max_message_length = get_config_int("max_message_length", 16384);
|
||||
int max_subscriptions_per_client = get_config_int("max_subscriptions_per_client", 20);
|
||||
int max_limit = get_config_int("max_limit", 5000);
|
||||
int max_event_tags = get_config_int("max_event_tags", 100);
|
||||
int max_content_length = get_config_int("max_content_length", 8196);
|
||||
int default_limit = get_config_int("default_limit", 500);
|
||||
int min_pow_difficulty = get_config_int("pow_min_difficulty", 0);
|
||||
int admin_enabled = get_config_bool("admin_enabled", 0);
|
||||
|
||||
// Add basic relay information
|
||||
if (strlen(g_unified_cache.relay_info.name) > 0) {
|
||||
cJSON_AddStringToObject(info, "name", g_unified_cache.relay_info.name);
|
||||
if (relay_name && strlen(relay_name) > 0) {
|
||||
cJSON_AddStringToObject(info, "name", relay_name);
|
||||
free((char*)relay_name);
|
||||
} else {
|
||||
cJSON_AddStringToObject(info, "name", "C Nostr Relay");
|
||||
}
|
||||
if (strlen(g_unified_cache.relay_info.description) > 0) {
|
||||
cJSON_AddStringToObject(info, "description", g_unified_cache.relay_info.description);
|
||||
|
||||
if (relay_description && strlen(relay_description) > 0) {
|
||||
cJSON_AddStringToObject(info, "description", relay_description);
|
||||
free((char*)relay_description);
|
||||
} else {
|
||||
cJSON_AddStringToObject(info, "description", "A high-performance Nostr relay implemented in C with SQLite storage");
|
||||
}
|
||||
if (strlen(g_unified_cache.relay_info.banner) > 0) {
|
||||
cJSON_AddStringToObject(info, "banner", g_unified_cache.relay_info.banner);
|
||||
|
||||
if (relay_banner && strlen(relay_banner) > 0) {
|
||||
cJSON_AddStringToObject(info, "banner", relay_banner);
|
||||
free((char*)relay_banner);
|
||||
}
|
||||
if (strlen(g_unified_cache.relay_info.icon) > 0) {
|
||||
cJSON_AddStringToObject(info, "icon", g_unified_cache.relay_info.icon);
|
||||
|
||||
if (relay_icon && strlen(relay_icon) > 0) {
|
||||
cJSON_AddStringToObject(info, "icon", relay_icon);
|
||||
free((char*)relay_icon);
|
||||
}
|
||||
if (strlen(g_unified_cache.relay_info.pubkey) > 0) {
|
||||
cJSON_AddStringToObject(info, "pubkey", g_unified_cache.relay_info.pubkey);
|
||||
|
||||
if (relay_pubkey && strlen(relay_pubkey) > 0) {
|
||||
cJSON_AddStringToObject(info, "pubkey", relay_pubkey);
|
||||
free((char*)relay_pubkey);
|
||||
}
|
||||
if (strlen(g_unified_cache.relay_info.contact) > 0) {
|
||||
cJSON_AddStringToObject(info, "contact", g_unified_cache.relay_info.contact);
|
||||
|
||||
if (relay_contact && strlen(relay_contact) > 0) {
|
||||
cJSON_AddStringToObject(info, "contact", relay_contact);
|
||||
free((char*)relay_contact);
|
||||
}
|
||||
|
||||
|
||||
// Add supported NIPs
|
||||
if (g_unified_cache.relay_info.supported_nips) {
|
||||
cJSON_AddItemToObject(info, "supported_nips", cJSON_Duplicate(g_unified_cache.relay_info.supported_nips, 1));
|
||||
if (supported_nips_csv && strlen(supported_nips_csv) > 0) {
|
||||
cJSON* supported_nips = parse_comma_separated_array(supported_nips_csv);
|
||||
if (supported_nips) {
|
||||
cJSON_AddItemToObject(info, "supported_nips", supported_nips);
|
||||
}
|
||||
free((char*)supported_nips_csv);
|
||||
} else {
|
||||
// Default supported NIPs
|
||||
cJSON* supported_nips = cJSON_CreateArray();
|
||||
if (supported_nips) {
|
||||
cJSON_AddItemToArray(supported_nips, cJSON_CreateNumber(1)); // NIP-01: Basic protocol
|
||||
cJSON_AddItemToArray(supported_nips, cJSON_CreateNumber(9)); // NIP-09: Event deletion
|
||||
cJSON_AddItemToArray(supported_nips, cJSON_CreateNumber(11)); // NIP-11: Relay information
|
||||
cJSON_AddItemToArray(supported_nips, cJSON_CreateNumber(13)); // NIP-13: Proof of Work
|
||||
cJSON_AddItemToArray(supported_nips, cJSON_CreateNumber(15)); // NIP-15: EOSE
|
||||
cJSON_AddItemToArray(supported_nips, cJSON_CreateNumber(20)); // NIP-20: Command results
|
||||
cJSON_AddItemToArray(supported_nips, cJSON_CreateNumber(40)); // NIP-40: Expiration Timestamp
|
||||
cJSON_AddItemToArray(supported_nips, cJSON_CreateNumber(42)); // NIP-42: Authentication
|
||||
cJSON_AddItemToObject(info, "supported_nips", supported_nips);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Add software information
|
||||
if (strlen(g_unified_cache.relay_info.software) > 0) {
|
||||
cJSON_AddStringToObject(info, "software", g_unified_cache.relay_info.software);
|
||||
if (relay_software && strlen(relay_software) > 0) {
|
||||
cJSON_AddStringToObject(info, "software", relay_software);
|
||||
free((char*)relay_software);
|
||||
} else {
|
||||
cJSON_AddStringToObject(info, "software", "https://git.laantungir.net/laantungir/c-relay.git");
|
||||
}
|
||||
if (strlen(g_unified_cache.relay_info.version) > 0) {
|
||||
cJSON_AddStringToObject(info, "version", g_unified_cache.relay_info.version);
|
||||
|
||||
if (relay_version && strlen(relay_version) > 0) {
|
||||
cJSON_AddStringToObject(info, "version", relay_version);
|
||||
free((char*)relay_version);
|
||||
} else {
|
||||
cJSON_AddStringToObject(info, "version", "0.2.0");
|
||||
}
|
||||
|
||||
|
||||
// Add policies
|
||||
if (strlen(g_unified_cache.relay_info.privacy_policy) > 0) {
|
||||
cJSON_AddStringToObject(info, "privacy_policy", g_unified_cache.relay_info.privacy_policy);
|
||||
if (privacy_policy && strlen(privacy_policy) > 0) {
|
||||
cJSON_AddStringToObject(info, "privacy_policy", privacy_policy);
|
||||
free((char*)privacy_policy);
|
||||
}
|
||||
if (strlen(g_unified_cache.relay_info.terms_of_service) > 0) {
|
||||
cJSON_AddStringToObject(info, "terms_of_service", g_unified_cache.relay_info.terms_of_service);
|
||||
|
||||
if (terms_of_service && strlen(terms_of_service) > 0) {
|
||||
cJSON_AddStringToObject(info, "terms_of_service", terms_of_service);
|
||||
free((char*)terms_of_service);
|
||||
}
|
||||
if (strlen(g_unified_cache.relay_info.posting_policy) > 0) {
|
||||
cJSON_AddStringToObject(info, "posting_policy", g_unified_cache.relay_info.posting_policy);
|
||||
|
||||
if (posting_policy && strlen(posting_policy) > 0) {
|
||||
cJSON_AddStringToObject(info, "posting_policy", posting_policy);
|
||||
free((char*)posting_policy);
|
||||
}
|
||||
|
||||
|
||||
// Add server limitations
|
||||
if (g_unified_cache.relay_info.limitation) {
|
||||
cJSON_AddItemToObject(info, "limitation", cJSON_Duplicate(g_unified_cache.relay_info.limitation, 1));
|
||||
cJSON* limitation = cJSON_CreateObject();
|
||||
if (limitation) {
|
||||
cJSON_AddNumberToObject(limitation, "max_message_length", max_message_length);
|
||||
cJSON_AddNumberToObject(limitation, "max_subscriptions", max_subscriptions_per_client);
|
||||
cJSON_AddNumberToObject(limitation, "max_limit", max_limit);
|
||||
cJSON_AddNumberToObject(limitation, "max_subid_length", SUBSCRIPTION_ID_MAX_LENGTH);
|
||||
cJSON_AddNumberToObject(limitation, "max_event_tags", max_event_tags);
|
||||
cJSON_AddNumberToObject(limitation, "max_content_length", max_content_length);
|
||||
cJSON_AddNumberToObject(limitation, "min_pow_difficulty", min_pow_difficulty);
|
||||
cJSON_AddBoolToObject(limitation, "auth_required", admin_enabled ? cJSON_True : cJSON_False);
|
||||
cJSON_AddBoolToObject(limitation, "payment_required", cJSON_False);
|
||||
cJSON_AddBoolToObject(limitation, "restricted_writes", cJSON_False);
|
||||
cJSON_AddNumberToObject(limitation, "created_at_lower_limit", 0);
|
||||
cJSON_AddNumberToObject(limitation, "created_at_upper_limit", 2147483647);
|
||||
cJSON_AddNumberToObject(limitation, "default_limit", default_limit);
|
||||
cJSON_AddItemToObject(info, "limitation", limitation);
|
||||
}
|
||||
|
||||
// Add retention policies if configured
|
||||
if (g_unified_cache.relay_info.retention && cJSON_GetArraySize(g_unified_cache.relay_info.retention) > 0) {
|
||||
cJSON_AddItemToObject(info, "retention", cJSON_Duplicate(g_unified_cache.relay_info.retention, 1));
|
||||
|
||||
// Add retention policies (empty array for now)
|
||||
cJSON* retention = cJSON_CreateArray();
|
||||
if (retention) {
|
||||
cJSON_AddItemToObject(info, "retention", retention);
|
||||
}
|
||||
|
||||
|
||||
// Add geographical and language information
|
||||
if (g_unified_cache.relay_info.relay_countries) {
|
||||
cJSON_AddItemToObject(info, "relay_countries", cJSON_Duplicate(g_unified_cache.relay_info.relay_countries, 1));
|
||||
if (relay_countries_csv && strlen(relay_countries_csv) > 0) {
|
||||
cJSON* relay_countries = parse_comma_separated_array(relay_countries_csv);
|
||||
if (relay_countries) {
|
||||
cJSON_AddItemToObject(info, "relay_countries", relay_countries);
|
||||
}
|
||||
free((char*)relay_countries_csv);
|
||||
} else {
|
||||
cJSON* relay_countries = cJSON_CreateArray();
|
||||
if (relay_countries) {
|
||||
cJSON_AddItemToArray(relay_countries, cJSON_CreateString("*"));
|
||||
cJSON_AddItemToObject(info, "relay_countries", relay_countries);
|
||||
}
|
||||
}
|
||||
if (g_unified_cache.relay_info.language_tags) {
|
||||
cJSON_AddItemToObject(info, "language_tags", cJSON_Duplicate(g_unified_cache.relay_info.language_tags, 1));
|
||||
|
||||
if (language_tags_csv && strlen(language_tags_csv) > 0) {
|
||||
cJSON* language_tags = parse_comma_separated_array(language_tags_csv);
|
||||
if (language_tags) {
|
||||
cJSON_AddItemToObject(info, "language_tags", language_tags);
|
||||
}
|
||||
free((char*)language_tags_csv);
|
||||
} else {
|
||||
cJSON* language_tags = cJSON_CreateArray();
|
||||
if (language_tags) {
|
||||
cJSON_AddItemToArray(language_tags, cJSON_CreateString("*"));
|
||||
cJSON_AddItemToObject(info, "language_tags", language_tags);
|
||||
}
|
||||
}
|
||||
if (g_unified_cache.relay_info.tags && cJSON_GetArraySize(g_unified_cache.relay_info.tags) > 0) {
|
||||
cJSON_AddItemToObject(info, "tags", cJSON_Duplicate(g_unified_cache.relay_info.tags, 1));
|
||||
|
||||
// Add content tags (empty array)
|
||||
cJSON* tags = cJSON_CreateArray();
|
||||
if (tags) {
|
||||
cJSON_AddItemToObject(info, "tags", tags);
|
||||
}
|
||||
|
||||
|
||||
// Add payment information if configured
|
||||
if (strlen(g_unified_cache.relay_info.payments_url) > 0) {
|
||||
cJSON_AddStringToObject(info, "payments_url", g_unified_cache.relay_info.payments_url);
|
||||
if (payments_url && strlen(payments_url) > 0) {
|
||||
cJSON_AddStringToObject(info, "payments_url", payments_url);
|
||||
free((char*)payments_url);
|
||||
}
|
||||
if (g_unified_cache.relay_info.fees && cJSON_GetObjectItem(g_unified_cache.relay_info.fees, "admission")) {
|
||||
cJSON_AddItemToObject(info, "fees", cJSON_Duplicate(g_unified_cache.relay_info.fees, 1));
|
||||
|
||||
// Add fees (empty object - no payment required by default)
|
||||
cJSON* fees = cJSON_CreateObject();
|
||||
if (fees) {
|
||||
cJSON_AddItemToObject(info, "fees", fees);
|
||||
}
|
||||
|
||||
pthread_mutex_unlock(&g_unified_cache.cache_lock);
|
||||
|
||||
|
||||
return info;
|
||||
}
|
||||
|
||||
|
||||
75
src/nip013.c
@@ -10,63 +10,38 @@
|
||||
#include "config.h"
|
||||
|
||||
|
||||
// NIP-13 PoW configuration structure
|
||||
struct pow_config {
|
||||
int enabled; // 0 = disabled, 1 = enabled
|
||||
int min_pow_difficulty; // Minimum required difficulty (0 = no requirement)
|
||||
int validation_flags; // Bitflags for validation options
|
||||
int require_nonce_tag; // 1 = require nonce tag presence
|
||||
int reject_lower_targets; // 1 = reject if committed < actual difficulty
|
||||
int strict_format; // 1 = enforce strict nonce tag format
|
||||
int anti_spam_mode; // 1 = full anti-spam validation
|
||||
};
|
||||
// Configuration functions from config.c
|
||||
extern int get_config_bool(const char* key, int default_value);
|
||||
extern int get_config_int(const char* key, int default_value);
|
||||
extern const char* get_config_value(const char* key);
|
||||
|
||||
// Initialize PoW configuration using configuration system
|
||||
void init_pow_config() {
|
||||
|
||||
// Get all config values first (without holding mutex to avoid deadlock)
|
||||
int pow_enabled = get_config_bool("pow_enabled", 1);
|
||||
int pow_min_difficulty = get_config_int("pow_min_difficulty", 0);
|
||||
const char* pow_mode = get_config_value("pow_mode");
|
||||
|
||||
pthread_mutex_lock(&g_unified_cache.cache_lock);
|
||||
|
||||
// Load PoW settings from configuration system
|
||||
g_unified_cache.pow_config.enabled = pow_enabled;
|
||||
g_unified_cache.pow_config.min_pow_difficulty = pow_min_difficulty;
|
||||
|
||||
// Configure PoW mode
|
||||
if (pow_mode) {
|
||||
if (strcmp(pow_mode, "strict") == 0) {
|
||||
g_unified_cache.pow_config.validation_flags = NOSTR_POW_VALIDATE_ANTI_SPAM | NOSTR_POW_STRICT_FORMAT;
|
||||
g_unified_cache.pow_config.require_nonce_tag = 1;
|
||||
g_unified_cache.pow_config.reject_lower_targets = 1;
|
||||
g_unified_cache.pow_config.strict_format = 1;
|
||||
g_unified_cache.pow_config.anti_spam_mode = 1;
|
||||
} else if (strcmp(pow_mode, "full") == 0) {
|
||||
g_unified_cache.pow_config.validation_flags = NOSTR_POW_VALIDATE_FULL;
|
||||
g_unified_cache.pow_config.require_nonce_tag = 1;
|
||||
} else if (strcmp(pow_mode, "basic") == 0) {
|
||||
g_unified_cache.pow_config.validation_flags = NOSTR_POW_VALIDATE_BASIC;
|
||||
} else if (strcmp(pow_mode, "disabled") == 0) {
|
||||
g_unified_cache.pow_config.enabled = 0;
|
||||
}
|
||||
free((char*)pow_mode); // Free dynamically allocated string
|
||||
} else {
|
||||
// Default to basic mode
|
||||
g_unified_cache.pow_config.validation_flags = NOSTR_POW_VALIDATE_BASIC;
|
||||
}
|
||||
|
||||
pthread_mutex_unlock(&g_unified_cache.cache_lock);
|
||||
// Configuration is now handled directly through database queries
|
||||
// No cache initialization needed
|
||||
}
|
||||
|
||||
// Validate event Proof of Work according to NIP-13
|
||||
int validate_event_pow(cJSON* event, char* error_message, size_t error_size) {
|
||||
pthread_mutex_lock(&g_unified_cache.cache_lock);
|
||||
int enabled = g_unified_cache.pow_config.enabled;
|
||||
int min_pow_difficulty = g_unified_cache.pow_config.min_pow_difficulty;
|
||||
int validation_flags = g_unified_cache.pow_config.validation_flags;
|
||||
pthread_mutex_unlock(&g_unified_cache.cache_lock);
|
||||
// Get PoW configuration directly from database
|
||||
int enabled = get_config_bool("pow_enabled", 1);
|
||||
int min_pow_difficulty = get_config_int("pow_min_difficulty", 0);
|
||||
const char* pow_mode = get_config_value("pow_mode");
|
||||
|
||||
// Determine validation flags based on mode
|
||||
int validation_flags = NOSTR_POW_VALIDATE_BASIC; // Default
|
||||
if (pow_mode) {
|
||||
if (strcmp(pow_mode, "strict") == 0) {
|
||||
validation_flags = NOSTR_POW_VALIDATE_ANTI_SPAM | NOSTR_POW_STRICT_FORMAT;
|
||||
} else if (strcmp(pow_mode, "full") == 0) {
|
||||
validation_flags = NOSTR_POW_VALIDATE_FULL;
|
||||
} else if (strcmp(pow_mode, "basic") == 0) {
|
||||
validation_flags = NOSTR_POW_VALIDATE_BASIC;
|
||||
} else if (strcmp(pow_mode, "disabled") == 0) {
|
||||
enabled = 0;
|
||||
}
|
||||
free((char*)pow_mode);
|
||||
}
|
||||
|
||||
if (!enabled) {
|
||||
return 0; // PoW validation disabled
|
||||
|
||||
27
src/nip042.c
@@ -12,6 +12,7 @@
|
||||
#include <string.h>
|
||||
#include <stdlib.h>
|
||||
#include <time.h>
|
||||
#include "websockets.h"
|
||||
|
||||
|
||||
// Forward declaration for notice message function
|
||||
@@ -22,23 +23,7 @@ int nostr_nip42_generate_challenge(char *challenge_buffer, size_t buffer_size);
|
||||
int nostr_nip42_verify_auth_event(cJSON *event, const char *challenge_id,
|
||||
const char *relay_url, int time_tolerance_seconds);
|
||||
|
||||
// Forward declaration for per_session_data struct (defined in main.c)
|
||||
struct per_session_data {
|
||||
int authenticated;
|
||||
void* subscriptions; // Head of this session's subscription list
|
||||
pthread_mutex_t session_lock; // Per-session thread safety
|
||||
char client_ip[41]; // Client IP for logging
|
||||
int subscription_count; // Number of subscriptions for this session
|
||||
|
||||
// NIP-42 Authentication State
|
||||
char authenticated_pubkey[65]; // Authenticated public key (64 hex + null)
|
||||
char active_challenge[65]; // Current challenge for this session (64 hex + null)
|
||||
time_t challenge_created; // When challenge was created
|
||||
time_t challenge_expires; // Challenge expiration time
|
||||
int nip42_auth_required_events; // Whether NIP-42 auth is required for EVENT submission
|
||||
int nip42_auth_required_subscriptions; // Whether NIP-42 auth is required for REQ operations
|
||||
int auth_challenge_sent; // Whether challenge has been sent (0/1)
|
||||
};
|
||||
// Forward declaration for per_session_data struct (defined in websockets.h)
|
||||
|
||||
|
||||
// Send NIP-42 authentication challenge to client
|
||||
@@ -70,11 +55,9 @@ void send_nip42_auth_challenge(struct lws* wsi, struct per_session_data* pss) {
|
||||
char* msg_str = cJSON_Print(auth_msg);
|
||||
if (msg_str) {
|
||||
size_t msg_len = strlen(msg_str);
|
||||
unsigned char* buf = malloc(LWS_PRE + msg_len);
|
||||
if (buf) {
|
||||
memcpy(buf + LWS_PRE, msg_str, msg_len);
|
||||
lws_write(wsi, buf + LWS_PRE, msg_len, LWS_WRITE_TEXT);
|
||||
free(buf);
|
||||
// Use proper message queue system instead of direct lws_write
|
||||
if (queue_message(wsi, pss, msg_str, msg_len, LWS_WRITE_TEXT) != 0) {
|
||||
DEBUG_ERROR("Failed to queue AUTH challenge message");
|
||||
}
|
||||
free(msg_str);
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
#include "../nostr_core_lib/nostr_core/nip013.h" // NIP-13: Proof of Work
|
||||
#include "../nostr_core_lib/nostr_core/nostr_common.h"
|
||||
#include "../nostr_core_lib/nostr_core/utils.h"
|
||||
#include "debug.h" // C-relay debug system
|
||||
#include "config.h" // C-relay configuration system
|
||||
#include <sqlite3.h>
|
||||
#include <stdio.h>
|
||||
@@ -360,11 +361,9 @@ int nostr_validate_unified_request(const char* json_string, size_t json_length)
|
||||
/////////////////////////////////////////////////////////////////////
|
||||
|
||||
// 12. NIP-13 Proof of Work validation
|
||||
pthread_mutex_lock(&g_unified_cache.cache_lock);
|
||||
int pow_enabled = g_unified_cache.pow_config.enabled;
|
||||
int pow_min_difficulty = g_unified_cache.pow_config.min_pow_difficulty;
|
||||
int pow_validation_flags = g_unified_cache.pow_config.validation_flags;
|
||||
pthread_mutex_unlock(&g_unified_cache.cache_lock);
|
||||
int pow_enabled = get_config_bool("pow_enabled", 0);
|
||||
int pow_min_difficulty = get_config_int("pow_min_difficulty", 0);
|
||||
int pow_validation_flags = get_config_int("pow_validation_flags", 1);
|
||||
|
||||
if (pow_enabled && pow_min_difficulty > 0) {
|
||||
nostr_pow_result_t pow_result;
|
||||
@@ -505,11 +504,10 @@ void nostr_request_result_free_file_data(nostr_request_result_t *result) {
|
||||
|
||||
|
||||
/**
|
||||
* Force cache refresh - use unified cache system
|
||||
* Force cache refresh - cache no longer exists, function kept for compatibility
|
||||
*/
|
||||
void nostr_request_validator_force_cache_refresh(void) {
|
||||
// Use unified cache refresh from config.c
|
||||
force_config_cache_refresh();
|
||||
// Cache no longer exists - direct database queries are used
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -534,6 +532,8 @@ int check_database_auth_rules(const char *pubkey, const char *operation __attrib
|
||||
sqlite3_stmt *stmt = NULL;
|
||||
int rc;
|
||||
|
||||
DEBUG_TRACE("Checking auth rules for pubkey: %s", pubkey);
|
||||
|
||||
if (!pubkey) {
|
||||
return NOSTR_ERROR_INVALID_INPUT;
|
||||
}
|
||||
@@ -550,19 +550,21 @@ int check_database_auth_rules(const char *pubkey, const char *operation __attrib
|
||||
|
||||
// Step 1: Check pubkey blacklist (highest priority)
|
||||
const char *blacklist_sql =
|
||||
"SELECT rule_type, action FROM auth_rules WHERE rule_type = "
|
||||
"'blacklist' AND pattern_type = 'pubkey' AND pattern_value = ? LIMIT 1";
|
||||
"SELECT rule_type FROM auth_rules WHERE rule_type = "
|
||||
"'blacklist' AND pattern_type = 'pubkey' AND pattern_value = ? AND active = 1 LIMIT 1";
|
||||
DEBUG_TRACE("Blacklist SQL: %s", blacklist_sql);
|
||||
rc = sqlite3_prepare_v2(db, blacklist_sql, -1, &stmt, NULL);
|
||||
if (rc == SQLITE_OK) {
|
||||
sqlite3_bind_text(stmt, 1, pubkey, -1, SQLITE_STATIC);
|
||||
|
||||
if (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||
const char *action = (const char *)sqlite3_column_text(stmt, 1);
|
||||
int step_result = sqlite3_step(stmt);
|
||||
DEBUG_TRACE("Blacklist query result: %s", step_result == SQLITE_ROW ? "FOUND" : "NOT_FOUND");
|
||||
|
||||
if (step_result == SQLITE_ROW) {
|
||||
DEBUG_TRACE("BLACKLIST HIT: Denying access for pubkey: %s", pubkey);
|
||||
// Set specific violation details for status code mapping
|
||||
strcpy(g_last_rule_violation.violation_type, "pubkey_blacklist");
|
||||
sprintf(g_last_rule_violation.reason, "Public key blacklisted: %s",
|
||||
action ? action : "PUBKEY_BLACKLIST");
|
||||
sprintf(g_last_rule_violation.reason, "Public key blacklisted");
|
||||
|
||||
sqlite3_finalize(stmt);
|
||||
sqlite3_close(db);
|
||||
@@ -574,19 +576,16 @@ int check_database_auth_rules(const char *pubkey, const char *operation __attrib
|
||||
// Step 2: Check hash blacklist
|
||||
if (resource_hash) {
|
||||
const char *hash_blacklist_sql =
|
||||
"SELECT rule_type, action FROM auth_rules WHERE rule_type = "
|
||||
"'blacklist' AND pattern_type = 'hash' AND pattern_value = ? LIMIT 1";
|
||||
"SELECT rule_type FROM auth_rules WHERE rule_type = "
|
||||
"'blacklist' AND pattern_type = 'hash' AND pattern_value = ? AND active = 1 LIMIT 1";
|
||||
rc = sqlite3_prepare_v2(db, hash_blacklist_sql, -1, &stmt, NULL);
|
||||
if (rc == SQLITE_OK) {
|
||||
sqlite3_bind_text(stmt, 1, resource_hash, -1, SQLITE_STATIC);
|
||||
|
||||
if (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||
const char *action = (const char *)sqlite3_column_text(stmt, 1);
|
||||
|
||||
// Set specific violation details for status code mapping
|
||||
strcpy(g_last_rule_violation.violation_type, "hash_blacklist");
|
||||
sprintf(g_last_rule_violation.reason, "File hash blacklisted: %s",
|
||||
action ? action : "HASH_BLACKLIST");
|
||||
sprintf(g_last_rule_violation.reason, "File hash blacklisted");
|
||||
|
||||
sqlite3_finalize(stmt);
|
||||
sqlite3_close(db);
|
||||
@@ -598,8 +597,8 @@ int check_database_auth_rules(const char *pubkey, const char *operation __attrib
|
||||
|
||||
// Step 3: Check pubkey whitelist
|
||||
const char *whitelist_sql =
|
||||
"SELECT rule_type, action FROM auth_rules WHERE rule_type = "
|
||||
"'whitelist' AND pattern_type = 'pubkey' AND pattern_value = ? LIMIT 1";
|
||||
"SELECT rule_type FROM auth_rules WHERE rule_type = "
|
||||
"'whitelist' AND pattern_type = 'pubkey' AND pattern_value = ? AND active = 1 LIMIT 1";
|
||||
rc = sqlite3_prepare_v2(db, whitelist_sql, -1, &stmt, NULL);
|
||||
if (rc == SQLITE_OK) {
|
||||
sqlite3_bind_text(stmt, 1, pubkey, -1, SQLITE_STATIC);
|
||||
@@ -615,7 +614,7 @@ int check_database_auth_rules(const char *pubkey, const char *operation __attrib
|
||||
// Step 4: Check if any whitelist rules exist - if yes, deny by default
|
||||
const char *whitelist_exists_sql =
|
||||
"SELECT COUNT(*) FROM auth_rules WHERE rule_type = 'whitelist' "
|
||||
"AND pattern_type = 'pubkey' LIMIT 1";
|
||||
"AND pattern_type = 'pubkey' AND active = 1 LIMIT 1";
|
||||
rc = sqlite3_prepare_v2(db, whitelist_exists_sql, -1, &stmt, NULL);
|
||||
if (rc == SQLITE_OK) {
|
||||
if (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||
|
||||
111
src/sql_schema.h
@@ -1,12 +1,11 @@
|
||||
/* Embedded SQL Schema for C Nostr Relay
|
||||
* Generated from db/schema.sql - Do not edit manually
|
||||
* Schema Version: 7
|
||||
* Schema Version: 8
|
||||
*/
|
||||
#ifndef SQL_SCHEMA_H
|
||||
#define SQL_SCHEMA_H
|
||||
|
||||
/* Schema version constant */
|
||||
#define EMBEDDED_SCHEMA_VERSION "7"
|
||||
#define EMBEDDED_SCHEMA_VERSION "8"
|
||||
|
||||
/* Embedded SQL schema as C string literal */
|
||||
static const char* const EMBEDDED_SCHEMA_SQL =
|
||||
@@ -15,7 +14,7 @@ static const char* const EMBEDDED_SCHEMA_SQL =
|
||||
-- Configuration system using config table\n\
|
||||
\n\
|
||||
-- Schema version tracking\n\
|
||||
PRAGMA user_version = 7;\n\
|
||||
PRAGMA user_version = 8;\n\
|
||||
\n\
|
||||
-- Enable foreign key support\n\
|
||||
PRAGMA foreign_keys = ON;\n\
|
||||
@@ -58,8 +57,8 @@ CREATE TABLE schema_info (\n\
|
||||
\n\
|
||||
-- Insert schema metadata\n\
|
||||
INSERT INTO schema_info (key, value) VALUES\n\
|
||||
('version', '7'),\n\
|
||||
('description', 'Hybrid Nostr relay schema with event-based and table-based configuration'),\n\
|
||||
('version', '8'),\n\
|
||||
('description', 'Hybrid Nostr relay schema with subscription deduplication support'),\n\
|
||||
('created_at', strftime('%s', 'now'));\n\
|
||||
\n\
|
||||
-- Helper views for common queries\n\
|
||||
@@ -93,16 +92,6 @@ FROM events\n\
|
||||
WHERE kind = 33334\n\
|
||||
ORDER BY created_at DESC;\n\
|
||||
\n\
|
||||
-- Optimization: Trigger for automatic cleanup of ephemeral events older than 1 hour\n\
|
||||
CREATE TRIGGER cleanup_ephemeral_events\n\
|
||||
AFTER INSERT ON events\n\
|
||||
WHEN NEW.event_type = 'ephemeral'\n\
|
||||
BEGIN\n\
|
||||
DELETE FROM events \n\
|
||||
WHERE event_type = 'ephemeral' \n\
|
||||
AND first_seen < (strftime('%s', 'now') - 3600);\n\
|
||||
END;\n\
|
||||
\n\
|
||||
-- Replaceable event handling trigger\n\
|
||||
CREATE TRIGGER handle_replaceable_events\n\
|
||||
AFTER INSERT ON events\n\
|
||||
@@ -142,8 +131,6 @@ CREATE TABLE auth_rules (\n\
|
||||
rule_type TEXT NOT NULL CHECK (rule_type IN ('whitelist', 'blacklist', 'rate_limit', 'auth_required')),\n\
|
||||
pattern_type TEXT NOT NULL CHECK (pattern_type IN ('pubkey', 'kind', 'ip', 'global')),\n\
|
||||
pattern_value TEXT,\n\
|
||||
action TEXT NOT NULL CHECK (action IN ('allow', 'deny', 'require_auth', 'rate_limit')),\n\
|
||||
parameters TEXT, -- JSON parameters for rate limiting, etc.\n\
|
||||
active INTEGER NOT NULL DEFAULT 1,\n\
|
||||
created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now')),\n\
|
||||
updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now'))\n\
|
||||
@@ -180,48 +167,22 @@ BEGIN\n\
|
||||
UPDATE config SET updated_at = strftime('%s', 'now') WHERE key = NEW.key;\n\
|
||||
END;\n\
|
||||
\n\
|
||||
-- Insert default configuration values\n\
|
||||
INSERT INTO config (key, value, data_type, description, category, requires_restart) VALUES\n\
|
||||
('relay_description', 'A C Nostr Relay', 'string', 'Relay description', 'general', 0),\n\
|
||||
('relay_contact', '', 'string', 'Relay contact information', 'general', 0),\n\
|
||||
('relay_software', 'https://github.com/laanwj/c-relay', 'string', 'Relay software URL', 'general', 0),\n\
|
||||
('relay_version', '1.0.0', 'string', 'Relay version', 'general', 0),\n\
|
||||
('relay_port', '8888', 'integer', 'Relay port number', 'network', 1),\n\
|
||||
('max_connections', '1000', 'integer', 'Maximum concurrent connections', 'network', 1),\n\
|
||||
('auth_enabled', 'false', 'boolean', 'Enable NIP-42 authentication', 'auth', 0),\n\
|
||||
('nip42_auth_required_events', 'false', 'boolean', 'Require auth for event publishing', 'auth', 0),\n\
|
||||
('nip42_auth_required_subscriptions', 'false', 'boolean', 'Require auth for subscriptions', 'auth', 0),\n\
|
||||
('nip42_auth_required_kinds', '[]', 'json', 'Event kinds requiring authentication', 'auth', 0),\n\
|
||||
('nip42_challenge_expiration', '600', 'integer', 'Auth challenge expiration seconds', 'auth', 0),\n\
|
||||
('pow_min_difficulty', '0', 'integer', 'Minimum proof-of-work difficulty', 'validation', 0),\n\
|
||||
('pow_mode', 'optional', 'string', 'Proof-of-work mode', 'validation', 0),\n\
|
||||
('nip40_expiration_enabled', 'true', 'boolean', 'Enable event expiration', 'validation', 0),\n\
|
||||
('nip40_expiration_strict', 'false', 'boolean', 'Strict expiration mode', 'validation', 0),\n\
|
||||
('nip40_expiration_filter', 'true', 'boolean', 'Filter expired events in queries', 'validation', 0),\n\
|
||||
('nip40_expiration_grace_period', '60', 'integer', 'Expiration grace period seconds', 'validation', 0),\n\
|
||||
('max_subscriptions_per_client', '25', 'integer', 'Maximum subscriptions per client', 'limits', 0),\n\
|
||||
('max_total_subscriptions', '1000', 'integer', 'Maximum total subscriptions', 'limits', 0),\n\
|
||||
('max_filters_per_subscription', '10', 'integer', 'Maximum filters per subscription', 'limits', 0),\n\
|
||||
('max_event_tags', '2000', 'integer', 'Maximum tags per event', 'limits', 0),\n\
|
||||
('max_content_length', '100000', 'integer', 'Maximum event content length', 'limits', 0),\n\
|
||||
('max_message_length', '131072', 'integer', 'Maximum WebSocket message length', 'limits', 0),\n\
|
||||
('default_limit', '100', 'integer', 'Default query limit', 'limits', 0),\n\
|
||||
('max_limit', '5000', 'integer', 'Maximum query limit', 'limits', 0);\n\
|
||||
\n\
|
||||
-- Persistent Subscriptions Logging Tables (Phase 2)\n\
|
||||
-- Optional database logging for subscription analytics and debugging\n\
|
||||
\n\
|
||||
-- Subscription events log\n\
|
||||
CREATE TABLE subscription_events (\n\
|
||||
-- Subscriptions log (renamed from subscription_events for clarity)\n\
|
||||
CREATE TABLE subscriptions (\n\
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,\n\
|
||||
subscription_id TEXT NOT NULL, -- Subscription ID from client\n\
|
||||
wsi_pointer TEXT NOT NULL, -- WebSocket pointer address (hex string)\n\
|
||||
client_ip TEXT NOT NULL, -- Client IP address\n\
|
||||
event_type TEXT NOT NULL CHECK (event_type IN ('created', 'closed', 'expired', 'disconnected')),\n\
|
||||
filter_json TEXT, -- JSON representation of filters (for created events)\n\
|
||||
events_sent INTEGER DEFAULT 0, -- Number of events sent to this subscription\n\
|
||||
created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now')),\n\
|
||||
ended_at INTEGER, -- When subscription ended (for closed/expired/disconnected)\n\
|
||||
duration INTEGER -- Computed: ended_at - created_at\n\
|
||||
duration INTEGER, -- Computed: ended_at - created_at\n\
|
||||
UNIQUE(subscription_id, wsi_pointer) -- Prevent duplicate subscriptions per connection\n\
|
||||
);\n\
|
||||
\n\
|
||||
-- Subscription metrics summary\n\
|
||||
@@ -237,34 +198,23 @@ CREATE TABLE subscription_metrics (\n\
|
||||
UNIQUE(date)\n\
|
||||
);\n\
|
||||
\n\
|
||||
-- Event broadcasting log (optional, for detailed analytics)\n\
|
||||
CREATE TABLE event_broadcasts (\n\
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,\n\
|
||||
event_id TEXT NOT NULL, -- Event ID that was broadcast\n\
|
||||
subscription_id TEXT NOT NULL, -- Subscription that received it\n\
|
||||
client_ip TEXT NOT NULL, -- Client IP\n\
|
||||
broadcast_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now')),\n\
|
||||
FOREIGN KEY (event_id) REFERENCES events(id)\n\
|
||||
);\n\
|
||||
\n\
|
||||
-- Indexes for subscription logging performance\n\
|
||||
CREATE INDEX idx_subscription_events_id ON subscription_events(subscription_id);\n\
|
||||
CREATE INDEX idx_subscription_events_type ON subscription_events(event_type);\n\
|
||||
CREATE INDEX idx_subscription_events_created ON subscription_events(created_at DESC);\n\
|
||||
CREATE INDEX idx_subscription_events_client ON subscription_events(client_ip);\n\
|
||||
CREATE INDEX idx_subscriptions_id ON subscriptions(subscription_id);\n\
|
||||
CREATE INDEX idx_subscriptions_type ON subscriptions(event_type);\n\
|
||||
CREATE INDEX idx_subscriptions_created ON subscriptions(created_at DESC);\n\
|
||||
CREATE INDEX idx_subscriptions_client ON subscriptions(client_ip);\n\
|
||||
CREATE INDEX idx_subscriptions_wsi ON subscriptions(wsi_pointer);\n\
|
||||
\n\
|
||||
CREATE INDEX idx_subscription_metrics_date ON subscription_metrics(date DESC);\n\
|
||||
\n\
|
||||
CREATE INDEX idx_event_broadcasts_event ON event_broadcasts(event_id);\n\
|
||||
CREATE INDEX idx_event_broadcasts_sub ON event_broadcasts(subscription_id);\n\
|
||||
CREATE INDEX idx_event_broadcasts_time ON event_broadcasts(broadcast_at DESC);\n\
|
||||
\n\
|
||||
-- Trigger to update subscription duration when ended\n\
|
||||
CREATE TRIGGER update_subscription_duration\n\
|
||||
AFTER UPDATE OF ended_at ON subscription_events\n\
|
||||
AFTER UPDATE OF ended_at ON subscriptions\n\
|
||||
WHEN NEW.ended_at IS NOT NULL AND OLD.ended_at IS NULL\n\
|
||||
BEGIN\n\
|
||||
UPDATE subscription_events\n\
|
||||
UPDATE subscriptions\n\
|
||||
SET duration = NEW.ended_at - NEW.created_at\n\
|
||||
WHERE id = NEW.id;\n\
|
||||
END;\n\
|
||||
@@ -279,24 +229,27 @@ SELECT\n\
|
||||
MAX(events_sent) as max_events_sent,\n\
|
||||
AVG(events_sent) as avg_events_sent,\n\
|
||||
COUNT(DISTINCT client_ip) as unique_clients\n\
|
||||
FROM subscription_events\n\
|
||||
FROM subscriptions\n\
|
||||
GROUP BY date(created_at, 'unixepoch')\n\
|
||||
ORDER BY date DESC;\n\
|
||||
\n\
|
||||
-- View for current active subscriptions (from log perspective)\n\
|
||||
CREATE VIEW active_subscriptions_log AS\n\
|
||||
SELECT\n\
|
||||
subscription_id,\n\
|
||||
client_ip,\n\
|
||||
filter_json,\n\
|
||||
events_sent,\n\
|
||||
created_at,\n\
|
||||
(strftime('%s', 'now') - created_at) as duration_seconds\n\
|
||||
FROM subscription_events\n\
|
||||
WHERE event_type = 'created'\n\
|
||||
AND subscription_id NOT IN (\n\
|
||||
SELECT subscription_id FROM subscription_events\n\
|
||||
WHERE event_type IN ('closed', 'expired', 'disconnected')\n\
|
||||
s.subscription_id,\n\
|
||||
s.client_ip,\n\
|
||||
s.filter_json,\n\
|
||||
s.events_sent,\n\
|
||||
s.created_at,\n\
|
||||
(strftime('%s', 'now') - s.created_at) as duration_seconds,\n\
|
||||
s.wsi_pointer\n\
|
||||
FROM subscriptions s\n\
|
||||
WHERE s.event_type = 'created'\n\
|
||||
AND NOT EXISTS (\n\
|
||||
SELECT 1 FROM subscriptions s2\n\
|
||||
WHERE s2.subscription_id = s.subscription_id\n\
|
||||
AND s2.wsi_pointer = s.wsi_pointer\n\
|
||||
AND s2.event_type IN ('closed', 'expired', 'disconnected')\n\
|
||||
);\n\
|
||||
\n\
|
||||
-- Database Statistics Views for Admin API\n\
|
||||
|
||||
@@ -25,11 +25,14 @@ int validate_timestamp_range(long since, long until, char* error_message, size_t
|
||||
int validate_numeric_limits(int limit, char* error_message, size_t error_size);
|
||||
int validate_search_term(const char* search_term, char* error_message, size_t error_size);
|
||||
|
||||
// Forward declaration for monitoring function
|
||||
void monitoring_on_subscription_change(void);
|
||||
|
||||
// Global database variable
|
||||
extern sqlite3* g_db;
|
||||
|
||||
// Global unified cache
|
||||
extern unified_config_cache_t g_unified_cache;
|
||||
// Configuration functions from config.c
|
||||
extern int get_config_bool(const char* key, int default_value);
|
||||
|
||||
// Global subscription manager
|
||||
extern subscription_manager_t g_subscription_manager;
|
||||
@@ -123,7 +126,7 @@ void free_subscription_filter(subscription_filter_t* filter) {
|
||||
}
|
||||
|
||||
// Validate subscription ID format and length
|
||||
static int validate_subscription_id(const char* sub_id) {
|
||||
int validate_subscription_id(const char* sub_id) {
|
||||
if (!sub_id) {
|
||||
return 0; // NULL pointer
|
||||
}
|
||||
@@ -133,11 +136,11 @@ static int validate_subscription_id(const char* sub_id) {
|
||||
return 0; // Empty or too long
|
||||
}
|
||||
|
||||
// Check for valid characters (alphanumeric, underscore, hyphen, colon)
|
||||
// Check for valid characters (alphanumeric, underscore, hyphen, colon, comma)
|
||||
for (size_t i = 0; i < len; i++) {
|
||||
char c = sub_id[i];
|
||||
if (!((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') ||
|
||||
(c >= '0' && c <= '9') || c == '_' || c == '-' || c == ':')) {
|
||||
(c >= '0' && c <= '9') || c == '_' || c == '-' || c == ':' || c == ',')) {
|
||||
return 0; // Invalid character
|
||||
}
|
||||
}
|
||||
@@ -238,27 +241,81 @@ void free_subscription(subscription_t* sub) {
|
||||
// Add subscription to global manager (thread-safe)
|
||||
int add_subscription_to_manager(subscription_t* sub) {
|
||||
if (!sub) return -1;
|
||||
|
||||
|
||||
pthread_mutex_lock(&g_subscription_manager.subscriptions_lock);
|
||||
|
||||
// Check global limits
|
||||
if (g_subscription_manager.total_subscriptions >= g_subscription_manager.max_total_subscriptions) {
|
||||
|
||||
// Check for existing subscription with same ID and WebSocket connection
|
||||
// Remove it first to prevent duplicates (implements subscription replacement per NIP-01)
|
||||
subscription_t** current = &g_subscription_manager.active_subscriptions;
|
||||
int found_duplicate = 0;
|
||||
subscription_t* duplicate_old = NULL;
|
||||
|
||||
while (*current) {
|
||||
subscription_t* existing = *current;
|
||||
|
||||
// Match by subscription ID and WebSocket pointer
|
||||
if (strcmp(existing->id, sub->id) == 0 && existing->wsi == sub->wsi) {
|
||||
// Found duplicate: mark inactive and unlink from global list under lock
|
||||
existing->active = 0;
|
||||
*current = existing->next;
|
||||
g_subscription_manager.total_subscriptions--;
|
||||
found_duplicate = 1;
|
||||
duplicate_old = existing; // defer free until after per-session unlink
|
||||
break;
|
||||
}
|
||||
|
||||
current = &(existing->next);
|
||||
}
|
||||
|
||||
// Check global limits (only if not replacing an existing subscription)
|
||||
if (!found_duplicate && g_subscription_manager.total_subscriptions >= g_subscription_manager.max_total_subscriptions) {
|
||||
pthread_mutex_unlock(&g_subscription_manager.subscriptions_lock);
|
||||
DEBUG_ERROR("Maximum total subscriptions reached");
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
||||
// Add to global list
|
||||
sub->next = g_subscription_manager.active_subscriptions;
|
||||
g_subscription_manager.active_subscriptions = sub;
|
||||
g_subscription_manager.total_subscriptions++;
|
||||
g_subscription_manager.total_created++;
|
||||
|
||||
|
||||
// Only increment total_created if this is a new subscription (not a replacement)
|
||||
if (!found_duplicate) {
|
||||
g_subscription_manager.total_created++;
|
||||
}
|
||||
|
||||
pthread_mutex_unlock(&g_subscription_manager.subscriptions_lock);
|
||||
|
||||
// Log subscription creation to database
|
||||
|
||||
// If we replaced an existing subscription, unlink it from the per-session list before freeing
|
||||
if (duplicate_old) {
|
||||
// Obtain per-session data for this wsi
|
||||
struct per_session_data* pss = (struct per_session_data*) lws_wsi_user(duplicate_old->wsi);
|
||||
if (pss) {
|
||||
pthread_mutex_lock(&pss->session_lock);
|
||||
struct subscription** scur = &pss->subscriptions;
|
||||
while (*scur) {
|
||||
if (*scur == duplicate_old) {
|
||||
// Unlink by pointer identity to avoid removing the newly-added one
|
||||
*scur = duplicate_old->session_next;
|
||||
if (pss->subscription_count > 0) {
|
||||
pss->subscription_count--;
|
||||
}
|
||||
break;
|
||||
}
|
||||
scur = &((*scur)->session_next);
|
||||
}
|
||||
pthread_mutex_unlock(&pss->session_lock);
|
||||
}
|
||||
// Now safe to free the old subscription
|
||||
free_subscription(duplicate_old);
|
||||
}
|
||||
|
||||
// Log subscription creation to database (INSERT OR REPLACE handles duplicates)
|
||||
log_subscription_created(sub);
|
||||
|
||||
// Trigger monitoring update for subscription changes
|
||||
monitoring_on_subscription_change();
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
@@ -306,6 +363,9 @@ int remove_subscription_from_manager(const char* sub_id, struct lws* wsi) {
|
||||
// Update events sent counter before freeing
|
||||
update_subscription_events_sent(sub_id_copy, events_sent_copy);
|
||||
|
||||
// Trigger monitoring update for subscription changes
|
||||
monitoring_on_subscription_change();
|
||||
|
||||
free_subscription(sub);
|
||||
return 0;
|
||||
}
|
||||
@@ -324,37 +384,52 @@ int remove_subscription_from_manager(const char* sub_id, struct lws* wsi) {
|
||||
|
||||
// Check if an event matches a subscription filter
|
||||
int event_matches_filter(cJSON* event, subscription_filter_t* filter) {
|
||||
DEBUG_TRACE("Checking event against subscription filter");
|
||||
|
||||
if (!event || !filter) {
|
||||
DEBUG_TRACE("Exiting event_matches_filter - null parameters");
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Debug: Log event details being tested
|
||||
cJSON* event_kind_obj = cJSON_GetObjectItem(event, "kind");
|
||||
cJSON* event_id_obj = cJSON_GetObjectItem(event, "id");
|
||||
cJSON* event_created_at_obj = cJSON_GetObjectItem(event, "created_at");
|
||||
|
||||
DEBUG_TRACE("FILTER_MATCH: Testing event kind=%d id=%.8s created_at=%ld",
|
||||
event_kind_obj ? (int)cJSON_GetNumberValue(event_kind_obj) : -1,
|
||||
event_id_obj && cJSON_IsString(event_id_obj) ? cJSON_GetStringValue(event_id_obj) : "null",
|
||||
event_created_at_obj ? (long)cJSON_GetNumberValue(event_created_at_obj) : 0);
|
||||
|
||||
// Check kinds filter
|
||||
if (filter->kinds && cJSON_IsArray(filter->kinds)) {
|
||||
DEBUG_TRACE("FILTER_MATCH: Checking kinds filter with %d kinds", cJSON_GetArraySize(filter->kinds));
|
||||
|
||||
cJSON* event_kind = cJSON_GetObjectItem(event, "kind");
|
||||
if (!event_kind || !cJSON_IsNumber(event_kind)) {
|
||||
DEBUG_WARN("FILTER_MATCH: Event has no valid kind field");
|
||||
return 0;
|
||||
}
|
||||
|
||||
int event_kind_val = (int)cJSON_GetNumberValue(event_kind);
|
||||
int kind_match = 0;
|
||||
DEBUG_TRACE("FILTER_MATCH: Event kind=%d", event_kind_val);
|
||||
|
||||
int kind_match = 0;
|
||||
cJSON* kind_item = NULL;
|
||||
cJSON_ArrayForEach(kind_item, filter->kinds) {
|
||||
if (cJSON_IsNumber(kind_item)) {
|
||||
int filter_kind = (int)cJSON_GetNumberValue(kind_item);
|
||||
DEBUG_TRACE("FILTER_MATCH: Comparing event kind %d with filter kind %d", event_kind_val, filter_kind);
|
||||
if (filter_kind == event_kind_val) {
|
||||
kind_match = 1;
|
||||
DEBUG_TRACE("FILTER_MATCH: Kind matched!");
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!kind_match) {
|
||||
DEBUG_TRACE("FILTER_MATCH: No kind match, filter rejected");
|
||||
return 0;
|
||||
}
|
||||
DEBUG_TRACE("FILTER_MATCH: Kinds filter passed");
|
||||
}
|
||||
|
||||
// Check authors filter
|
||||
@@ -415,13 +490,19 @@ int event_matches_filter(cJSON* event, subscription_filter_t* filter) {
|
||||
if (filter->since > 0) {
|
||||
cJSON* event_created_at = cJSON_GetObjectItem(event, "created_at");
|
||||
if (!event_created_at || !cJSON_IsNumber(event_created_at)) {
|
||||
DEBUG_WARN("FILTER_MATCH: Event has no valid created_at field");
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
long event_timestamp = (long)cJSON_GetNumberValue(event_created_at);
|
||||
DEBUG_TRACE("FILTER_MATCH: Checking since filter: event_ts=%ld filter_since=%ld",
|
||||
event_timestamp, filter->since);
|
||||
|
||||
if (event_timestamp < filter->since) {
|
||||
DEBUG_TRACE("FILTER_MATCH: Event too old (before since), filter rejected");
|
||||
return 0;
|
||||
}
|
||||
DEBUG_TRACE("FILTER_MATCH: Since filter passed");
|
||||
}
|
||||
|
||||
// Check until filter
|
||||
@@ -503,7 +584,7 @@ int event_matches_filter(cJSON* event, subscription_filter_t* filter) {
|
||||
}
|
||||
}
|
||||
|
||||
DEBUG_TRACE("Exiting event_matches_filter - match found");
|
||||
DEBUG_TRACE("FILTER_MATCH: All filters passed, event matches!");
|
||||
return 1; // All filters passed
|
||||
}
|
||||
|
||||
@@ -513,31 +594,35 @@ int event_matches_subscription(cJSON* event, subscription_t* subscription) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
DEBUG_TRACE("SUB_MATCH: Testing subscription '%s'", subscription->id);
|
||||
|
||||
int filter_num = 0;
|
||||
subscription_filter_t* filter = subscription->filters;
|
||||
while (filter) {
|
||||
filter_num++;
|
||||
DEBUG_TRACE("SUB_MATCH: Testing filter #%d", filter_num);
|
||||
|
||||
if (event_matches_filter(event, filter)) {
|
||||
DEBUG_TRACE("SUB_MATCH: Filter #%d matched! Subscription '%s' matches",
|
||||
filter_num, subscription->id);
|
||||
return 1; // Match found (OR logic)
|
||||
}
|
||||
filter = filter->next;
|
||||
}
|
||||
|
||||
DEBUG_TRACE("SUB_MATCH: No filters matched for subscription '%s'", subscription->id);
|
||||
return 0; // No filters matched
|
||||
}
|
||||
|
||||
// Broadcast event to all matching subscriptions (thread-safe)
|
||||
int broadcast_event_to_subscriptions(cJSON* event) {
|
||||
DEBUG_TRACE("Broadcasting event to subscriptions");
|
||||
|
||||
if (!event) {
|
||||
DEBUG_TRACE("Exiting broadcast_event_to_subscriptions - null event");
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Check if event is expired and should not be broadcast (NIP-40)
|
||||
pthread_mutex_lock(&g_unified_cache.cache_lock);
|
||||
int expiration_enabled = g_unified_cache.expiration_config.enabled;
|
||||
int filter_responses = g_unified_cache.expiration_config.filter_responses;
|
||||
pthread_mutex_unlock(&g_unified_cache.cache_lock);
|
||||
int expiration_enabled = get_config_bool("expiration_enabled", 1);
|
||||
int filter_responses = get_config_bool("expiration_filter", 1);
|
||||
|
||||
if (expiration_enabled && filter_responses) {
|
||||
time_t current_time = time(NULL);
|
||||
@@ -547,7 +632,17 @@ int broadcast_event_to_subscriptions(cJSON* event) {
|
||||
}
|
||||
|
||||
int broadcasts = 0;
|
||||
|
||||
|
||||
// Log event details
|
||||
cJSON* event_kind = cJSON_GetObjectItem(event, "kind");
|
||||
cJSON* event_id = cJSON_GetObjectItem(event, "id");
|
||||
cJSON* event_created_at = cJSON_GetObjectItem(event, "created_at");
|
||||
|
||||
DEBUG_TRACE("BROADCAST: Event kind=%d id=%.8s created_at=%ld",
|
||||
event_kind ? (int)cJSON_GetNumberValue(event_kind) : -1,
|
||||
event_id && cJSON_IsString(event_id) ? cJSON_GetStringValue(event_id) : "null",
|
||||
event_created_at ? (long)cJSON_GetNumberValue(event_created_at) : 0);
|
||||
|
||||
// Create a temporary list of matching subscriptions to avoid holding lock during I/O
|
||||
typedef struct temp_sub {
|
||||
struct lws* wsi;
|
||||
@@ -555,13 +650,21 @@ int broadcast_event_to_subscriptions(cJSON* event) {
|
||||
char client_ip[CLIENT_IP_MAX_LENGTH];
|
||||
struct temp_sub* next;
|
||||
} temp_sub_t;
|
||||
|
||||
|
||||
temp_sub_t* matching_subs = NULL;
|
||||
int matching_count = 0;
|
||||
|
||||
// First pass: collect matching subscriptions while holding lock
|
||||
pthread_mutex_lock(&g_subscription_manager.subscriptions_lock);
|
||||
|
||||
int total_subs = 0;
|
||||
subscription_t* count_sub = g_subscription_manager.active_subscriptions;
|
||||
while (count_sub) {
|
||||
total_subs++;
|
||||
count_sub = count_sub->next;
|
||||
}
|
||||
DEBUG_TRACE("BROADCAST: Checking %d active subscriptions", total_subs);
|
||||
|
||||
subscription_t* sub = g_subscription_manager.active_subscriptions;
|
||||
while (sub) {
|
||||
if (sub->active && sub->wsi && event_matches_subscription(event, sub)) {
|
||||
@@ -613,30 +716,41 @@ int broadcast_event_to_subscriptions(cJSON* event) {
|
||||
if (buf) {
|
||||
memcpy(buf + LWS_PRE, msg_str, msg_len);
|
||||
|
||||
// Send to WebSocket connection with error checking
|
||||
// Note: lws_write can fail if connection is closed, but won't crash
|
||||
int write_result = lws_write(current_temp->wsi, buf + LWS_PRE, msg_len, LWS_WRITE_TEXT);
|
||||
if (write_result >= 0) {
|
||||
// DEBUG: Log WebSocket frame details before sending
|
||||
DEBUG_TRACE("WS_FRAME_SEND: type=EVENT sub=%s len=%zu data=%.100s%s",
|
||||
current_temp->id,
|
||||
msg_len,
|
||||
msg_str,
|
||||
msg_len > 100 ? "..." : "");
|
||||
|
||||
// Queue message for proper libwebsockets pattern
|
||||
struct per_session_data* pss = (struct per_session_data*)lws_wsi_user(current_temp->wsi);
|
||||
if (queue_message(current_temp->wsi, pss, msg_str, msg_len, LWS_WRITE_TEXT) == 0) {
|
||||
// Message queued successfully
|
||||
broadcasts++;
|
||||
|
||||
|
||||
// Update events sent counter for this subscription
|
||||
pthread_mutex_lock(&g_subscription_manager.subscriptions_lock);
|
||||
subscription_t* update_sub = g_subscription_manager.active_subscriptions;
|
||||
while (update_sub) {
|
||||
if (update_sub->wsi == current_temp->wsi &&
|
||||
strcmp(update_sub->id, current_temp->id) == 0) {
|
||||
strcmp(update_sub->id, current_temp->id) == 0 &&
|
||||
update_sub->active) { // Add active check to prevent use-after-free
|
||||
update_sub->events_sent++;
|
||||
break;
|
||||
}
|
||||
update_sub = update_sub->next;
|
||||
}
|
||||
pthread_mutex_unlock(&g_subscription_manager.subscriptions_lock);
|
||||
|
||||
|
||||
// Log event broadcast to database (optional - can be disabled for performance)
|
||||
cJSON* event_id_obj = cJSON_GetObjectItem(event, "id");
|
||||
if (event_id_obj && cJSON_IsString(event_id_obj)) {
|
||||
log_event_broadcast(cJSON_GetStringValue(event_id_obj), current_temp->id, current_temp->client_ip);
|
||||
}
|
||||
// NOTE: event_broadcasts table removed due to FOREIGN KEY constraint issues
|
||||
// cJSON* event_id_obj = cJSON_GetObjectItem(event, "id");
|
||||
// if (event_id_obj && cJSON_IsString(event_id_obj)) {
|
||||
// log_event_broadcast(cJSON_GetStringValue(event_id_obj), current_temp->id, current_temp->client_ip);
|
||||
// }
|
||||
} else {
|
||||
DEBUG_ERROR("Failed to queue EVENT message for sub=%s", current_temp->id);
|
||||
}
|
||||
|
||||
free(buf);
|
||||
@@ -661,10 +775,41 @@ int broadcast_event_to_subscriptions(cJSON* event) {
|
||||
pthread_mutex_unlock(&g_subscription_manager.subscriptions_lock);
|
||||
|
||||
DEBUG_LOG("Event broadcast complete: %d subscriptions matched", broadcasts);
|
||||
DEBUG_TRACE("Exiting broadcast_event_to_subscriptions");
|
||||
return broadcasts;
|
||||
}
|
||||
|
||||
// Check if any active subscription exists for a specific event kind (thread-safe)
|
||||
int has_subscriptions_for_kind(int event_kind) {
|
||||
pthread_mutex_lock(&g_subscription_manager.subscriptions_lock);
|
||||
|
||||
subscription_t* sub = g_subscription_manager.active_subscriptions;
|
||||
while (sub) {
|
||||
if (sub->active && sub->filters) {
|
||||
subscription_filter_t* filter = sub->filters;
|
||||
while (filter) {
|
||||
// Check if this filter includes our event kind
|
||||
if (filter->kinds && cJSON_IsArray(filter->kinds)) {
|
||||
cJSON* kind_item = NULL;
|
||||
cJSON_ArrayForEach(kind_item, filter->kinds) {
|
||||
if (cJSON_IsNumber(kind_item)) {
|
||||
int filter_kind = (int)cJSON_GetNumberValue(kind_item);
|
||||
if (filter_kind == event_kind) {
|
||||
pthread_mutex_unlock(&g_subscription_manager.subscriptions_lock);
|
||||
return 1; // Found matching subscription
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
filter = filter->next;
|
||||
}
|
||||
}
|
||||
sub = sub->next;
|
||||
}
|
||||
|
||||
pthread_mutex_unlock(&g_subscription_manager.subscriptions_lock);
|
||||
return 0; // No matching subscriptions
|
||||
}
|
||||
|
||||
|
||||
/////////////////////////////////////////////////////////////////////////////////////////
|
||||
/////////////////////////////////////////////////////////////////////////////////////////
|
||||
@@ -676,6 +821,10 @@ int broadcast_event_to_subscriptions(cJSON* event) {
|
||||
void log_subscription_created(const subscription_t* sub) {
|
||||
if (!g_db || !sub) return;
|
||||
|
||||
// Convert wsi pointer to string
|
||||
char wsi_str[32];
|
||||
snprintf(wsi_str, sizeof(wsi_str), "%p", (void*)sub->wsi);
|
||||
|
||||
// Create filter JSON for logging
|
||||
char* filter_json = NULL;
|
||||
if (sub->filters) {
|
||||
@@ -722,16 +871,18 @@ void log_subscription_created(const subscription_t* sub) {
|
||||
cJSON_Delete(filters_array);
|
||||
}
|
||||
|
||||
// Use INSERT OR REPLACE to handle duplicates automatically
|
||||
const char* sql =
|
||||
"INSERT INTO subscription_events (subscription_id, client_ip, event_type, filter_json) "
|
||||
"VALUES (?, ?, 'created', ?)";
|
||||
"INSERT OR REPLACE INTO subscriptions (subscription_id, wsi_pointer, client_ip, event_type, filter_json) "
|
||||
"VALUES (?, ?, ?, 'created', ?)";
|
||||
|
||||
sqlite3_stmt* stmt;
|
||||
int rc = sqlite3_prepare_v2(g_db, sql, -1, &stmt, NULL);
|
||||
if (rc == SQLITE_OK) {
|
||||
sqlite3_bind_text(stmt, 1, sub->id, -1, SQLITE_STATIC);
|
||||
sqlite3_bind_text(stmt, 2, sub->client_ip, -1, SQLITE_STATIC);
|
||||
sqlite3_bind_text(stmt, 3, filter_json ? filter_json : "[]", -1, SQLITE_TRANSIENT);
|
||||
sqlite3_bind_text(stmt, 2, wsi_str, -1, SQLITE_TRANSIENT);
|
||||
sqlite3_bind_text(stmt, 3, sub->client_ip, -1, SQLITE_STATIC);
|
||||
sqlite3_bind_text(stmt, 4, filter_json ? filter_json : "[]", -1, SQLITE_TRANSIENT);
|
||||
|
||||
sqlite3_step(stmt);
|
||||
sqlite3_finalize(stmt);
|
||||
@@ -746,8 +897,8 @@ void log_subscription_closed(const char* sub_id, const char* client_ip, const ch
|
||||
if (!g_db || !sub_id) return;
|
||||
|
||||
const char* sql =
|
||||
"INSERT INTO subscription_events (subscription_id, client_ip, event_type) "
|
||||
"VALUES (?, ?, 'closed')";
|
||||
"INSERT INTO subscriptions (subscription_id, wsi_pointer, client_ip, event_type) "
|
||||
"VALUES (?, '', ?, 'closed')";
|
||||
|
||||
sqlite3_stmt* stmt;
|
||||
int rc = sqlite3_prepare_v2(g_db, sql, -1, &stmt, NULL);
|
||||
@@ -761,7 +912,7 @@ void log_subscription_closed(const char* sub_id, const char* client_ip, const ch
|
||||
|
||||
// Update the corresponding 'created' entry with end time and events sent
|
||||
const char* update_sql =
|
||||
"UPDATE subscription_events "
|
||||
"UPDATE subscriptions "
|
||||
"SET ended_at = strftime('%s', 'now') "
|
||||
"WHERE subscription_id = ? AND event_type = 'created' AND ended_at IS NULL";
|
||||
|
||||
@@ -779,7 +930,7 @@ void log_subscription_disconnected(const char* client_ip) {
|
||||
|
||||
// Mark all active subscriptions for this client as disconnected
|
||||
const char* sql =
|
||||
"UPDATE subscription_events "
|
||||
"UPDATE subscriptions "
|
||||
"SET ended_at = strftime('%s', 'now') "
|
||||
"WHERE client_ip = ? AND event_type = 'created' AND ended_at IS NULL";
|
||||
|
||||
@@ -794,8 +945,8 @@ void log_subscription_disconnected(const char* client_ip) {
|
||||
if (changes > 0) {
|
||||
// Log a disconnection event
|
||||
const char* insert_sql =
|
||||
"INSERT INTO subscription_events (subscription_id, client_ip, event_type) "
|
||||
"VALUES ('disconnect', ?, 'disconnected')";
|
||||
"INSERT INTO subscriptions (subscription_id, wsi_pointer, client_ip, event_type) "
|
||||
"VALUES ('disconnect', '', ?, 'disconnected')";
|
||||
|
||||
rc = sqlite3_prepare_v2(g_db, insert_sql, -1, &stmt, NULL);
|
||||
if (rc == SQLITE_OK) {
|
||||
@@ -808,31 +959,32 @@ void log_subscription_disconnected(const char* client_ip) {
|
||||
}
|
||||
|
||||
// Log event broadcast to database (optional, can be resource intensive)
|
||||
void log_event_broadcast(const char* event_id, const char* sub_id, const char* client_ip) {
|
||||
if (!g_db || !event_id || !sub_id || !client_ip) return;
|
||||
|
||||
const char* sql =
|
||||
"INSERT INTO event_broadcasts (event_id, subscription_id, client_ip) "
|
||||
"VALUES (?, ?, ?)";
|
||||
|
||||
sqlite3_stmt* stmt;
|
||||
int rc = sqlite3_prepare_v2(g_db, sql, -1, &stmt, NULL);
|
||||
if (rc == SQLITE_OK) {
|
||||
sqlite3_bind_text(stmt, 1, event_id, -1, SQLITE_STATIC);
|
||||
sqlite3_bind_text(stmt, 2, sub_id, -1, SQLITE_STATIC);
|
||||
sqlite3_bind_text(stmt, 3, client_ip, -1, SQLITE_STATIC);
|
||||
|
||||
sqlite3_step(stmt);
|
||||
sqlite3_finalize(stmt);
|
||||
}
|
||||
}
|
||||
// REMOVED: event_broadcasts table removed due to FOREIGN KEY constraint issues
|
||||
// void log_event_broadcast(const char* event_id, const char* sub_id, const char* client_ip) {
|
||||
// if (!g_db || !event_id || !sub_id || !client_ip) return;
|
||||
//
|
||||
// const char* sql =
|
||||
// "INSERT INTO event_broadcasts (event_id, subscription_id, client_ip) "
|
||||
// "VALUES (?, ?, ?)";
|
||||
//
|
||||
// sqlite3_stmt* stmt;
|
||||
// int rc = sqlite3_prepare_v2(g_db, sql, -1, &stmt, NULL);
|
||||
// if (rc == SQLITE_OK) {
|
||||
// sqlite3_bind_text(stmt, 1, event_id, -1, SQLITE_STATIC);
|
||||
// sqlite3_bind_text(stmt, 2, sub_id, -1, SQLITE_STATIC);
|
||||
// sqlite3_bind_text(stmt, 3, client_ip, -1, SQLITE_STATIC);
|
||||
//
|
||||
// sqlite3_step(stmt);
|
||||
// sqlite3_finalize(stmt);
|
||||
// }
|
||||
// }
|
||||
|
||||
// Update events sent counter for a subscription
|
||||
void update_subscription_events_sent(const char* sub_id, int events_sent) {
|
||||
if (!g_db || !sub_id) return;
|
||||
|
||||
const char* sql =
|
||||
"UPDATE subscription_events "
|
||||
"UPDATE subscriptions "
|
||||
"SET events_sent = ? "
|
||||
"WHERE subscription_id = ? AND event_type = 'created'";
|
||||
|
||||
@@ -847,6 +999,44 @@ void update_subscription_events_sent(const char* sub_id, int events_sent) {
|
||||
}
|
||||
}
|
||||
|
||||
// Cleanup all subscriptions on startup
|
||||
void cleanup_all_subscriptions_on_startup(void) {
|
||||
if (!g_db) {
|
||||
DEBUG_ERROR("Database not available for startup cleanup");
|
||||
return;
|
||||
}
|
||||
|
||||
DEBUG_LOG("Performing startup subscription cleanup");
|
||||
|
||||
// Mark all active subscriptions as disconnected
|
||||
const char* sql =
|
||||
"UPDATE subscriptions "
|
||||
"SET ended_at = strftime('%s', 'now') "
|
||||
"WHERE event_type = 'created' AND ended_at IS NULL";
|
||||
|
||||
sqlite3_stmt* stmt;
|
||||
int rc = sqlite3_prepare_v2(g_db, sql, -1, &stmt, NULL);
|
||||
if (rc != SQLITE_OK) {
|
||||
DEBUG_ERROR("Failed to prepare startup cleanup query");
|
||||
return;
|
||||
}
|
||||
|
||||
rc = sqlite3_step(stmt);
|
||||
int changes = sqlite3_changes(g_db);
|
||||
sqlite3_finalize(stmt);
|
||||
|
||||
if (rc != SQLITE_DONE) {
|
||||
DEBUG_ERROR("Failed to execute startup cleanup");
|
||||
return;
|
||||
}
|
||||
|
||||
if (changes > 0) {
|
||||
DEBUG_LOG("Startup cleanup: marked %d orphaned subscriptions as disconnected", changes);
|
||||
} else {
|
||||
DEBUG_LOG("Startup cleanup: no orphaned subscriptions found");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////
|
||||
///////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
@@ -93,6 +93,7 @@ struct subscription_manager {
|
||||
};
|
||||
|
||||
// Function declarations
|
||||
int validate_subscription_id(const char* sub_id);
|
||||
subscription_filter_t* create_subscription_filter(cJSON* filter_json);
|
||||
void free_subscription_filter(subscription_filter_t* filter);
|
||||
subscription_t* create_subscription(const char* sub_id, struct lws* wsi, cJSON* filters_array, const char* client_ip);
|
||||
@@ -114,7 +115,12 @@ int get_active_connections_for_ip(const char* client_ip);
|
||||
void log_subscription_created(const subscription_t* sub);
|
||||
void log_subscription_closed(const char* sub_id, const char* client_ip, const char* reason);
|
||||
void log_subscription_disconnected(const char* client_ip);
|
||||
void log_event_broadcast(const char* event_id, const char* sub_id, const char* client_ip);
|
||||
void update_subscription_events_sent(const char* sub_id, int events_sent);
|
||||
|
||||
// Subscription query functions
|
||||
int has_subscriptions_for_kind(int event_kind);
|
||||
|
||||
// Startup cleanup function
|
||||
void cleanup_all_subscriptions_on_startup(void);
|
||||
|
||||
#endif // SUBSCRIPTIONS_H
|
||||
1325
src/websockets.c
@@ -21,16 +21,24 @@
|
||||
|
||||
// Filter validation constants
|
||||
#define MAX_FILTERS_PER_REQUEST 10
|
||||
#define MAX_AUTHORS_PER_FILTER 100
|
||||
#define MAX_IDS_PER_FILTER 100
|
||||
#define MAX_KINDS_PER_FILTER 50
|
||||
#define MAX_TAG_VALUES_PER_FILTER 100
|
||||
#define MAX_AUTHORS_PER_FILTER 1000
|
||||
#define MAX_IDS_PER_FILTER 1000
|
||||
#define MAX_KINDS_PER_FILTER 500
|
||||
#define MAX_TAG_VALUES_PER_FILTER 1000
|
||||
#define MAX_KIND_VALUE 65535
|
||||
#define MAX_TIMESTAMP_VALUE 2147483647 // Max 32-bit signed int
|
||||
#define MAX_LIMIT_VALUE 5000
|
||||
#define MAX_SEARCH_LENGTH 256
|
||||
#define MAX_TAG_VALUE_LENGTH 1024
|
||||
|
||||
// Message queue node for proper libwebsockets pattern
|
||||
struct message_queue_node {
|
||||
unsigned char* data; // Message data (with LWS_PRE space)
|
||||
size_t length; // Message length (without LWS_PRE)
|
||||
enum lws_write_protocol type; // LWS_WRITE_TEXT, etc.
|
||||
struct message_queue_node* next; // Next node in queue
|
||||
};
|
||||
|
||||
// Enhanced per-session data with subscription management, NIP-42 authentication, and rate limiting
|
||||
struct per_session_data {
|
||||
int authenticated;
|
||||
@@ -38,6 +46,7 @@ struct per_session_data {
|
||||
pthread_mutex_t session_lock; // Per-session thread safety
|
||||
char client_ip[CLIENT_IP_MAX_LENGTH]; // Client IP for logging
|
||||
int subscription_count; // Number of subscriptions for this session
|
||||
time_t connection_established; // When WebSocket connection was established
|
||||
|
||||
// NIP-42 Authentication State
|
||||
char authenticated_pubkey[65]; // Authenticated public key (64 hex + null)
|
||||
@@ -58,6 +67,18 @@ struct per_session_data {
|
||||
int malformed_request_count; // Count of malformed requests in current hour
|
||||
time_t malformed_request_window_start; // Start of current hour window
|
||||
time_t malformed_request_blocked_until; // Time until blocked for malformed requests
|
||||
|
||||
// Message queue for proper libwebsockets pattern (replaces single buffer)
|
||||
struct message_queue_node* message_queue_head; // Head of message queue
|
||||
struct message_queue_node* message_queue_tail; // Tail of message queue
|
||||
int message_queue_count; // Number of messages in queue
|
||||
int writeable_requested; // Flag: 1 if writeable callback requested
|
||||
|
||||
// Message reassembly for handling fragmented WebSocket messages
|
||||
char* reassembly_buffer; // Buffer for accumulating message fragments (NULL when not reassembling)
|
||||
size_t reassembly_size; // Current size of accumulated data
|
||||
size_t reassembly_capacity; // Allocated capacity of reassembly buffer
|
||||
int reassembly_active; // Flag: 1 if currently reassembling a message
|
||||
};
|
||||
|
||||
// NIP-11 HTTP session data structure for managing buffer lifetime
|
||||
@@ -72,6 +93,10 @@ struct nip11_session_data {
|
||||
// Function declarations
|
||||
int start_websocket_relay(int port_override, int strict_port);
|
||||
|
||||
// Message queue functions for proper libwebsockets pattern
|
||||
int queue_message(struct lws* wsi, struct per_session_data* pss, const char* message, size_t length, enum lws_write_protocol type);
|
||||
int process_message_queue(struct lws* wsi, struct per_session_data* pss);
|
||||
|
||||
// Auth rules checking function from request_validator.c
|
||||
int check_database_auth_rules(const char *pubkey, const char *operation, const char *resource_hash);
|
||||
|
||||
|
||||
5
tests/.test_keys.txt
Normal file
@@ -0,0 +1,5 @@
|
||||
# Test key configuration (from make_and_restart_relay.sh -t)
|
||||
ADMIN_PRIVATE_KEY="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
|
||||
ADMIN_PUBLIC_KEY="6a04ab98d9e4774ad806e302dddeb63bea16b5cb5f223ee77478e861bb583eb3"
|
||||
RELAY_PUBLIC_KEY="4f355bdcb7cc0af728ef3cceb9615d90684bb5b2ca5f859ab0f0b704075871aa"
|
||||
RELAY_URL="ws://localhost:8888"
|
||||
12
tests/debug.log
Normal file
@@ -0,0 +1,12 @@
|
||||
|
||||
=== NOSTR WebSocket Debug Log Started ===
|
||||
[14:13:42.079] SEND localhost:8888: ["EVENT", {
|
||||
"pubkey": "e74e808f64b82fe4671b92cdf83f6dd5f5f44dbcb67fbd0e044f34a6193e0994",
|
||||
"created_at": 1761499244,
|
||||
"kind": 1059,
|
||||
"tags": [["p", "4f355bdcb7cc0af728ef3cceb9615d90684bb5b2ca5f859ab0f0b704075871aa"]],
|
||||
"content": "ApTb8y2oD3/TtVCV73Szhgfh5ODlluGd5zjsH44g5BBwaGB1NshOJ/5kF/XN0TfYJKQBe07UTpnOYMZ4l2ppU6SrR8Tor+ZEiAF/kpCpa/x6LDDIvf4mueQicDKjOf8Y6oEbsxYjtFrpuSC0LLMgLaVhcZjAgVD0YQTo+8nHOzHZD5RBr305vdnrxIe4ubEficAHCpnKq9L3A46AIyb+aHjjTbSYmB061cf6hzLSnmdh5xeACExjhxwsX9ivSvqGYcDNsH1JCM8EYQyRX9xAPDBYM1yuS8PpadqMluOcqOd/FFYyjYNpFrardblPsjUzZTz/TDSLyrYFDUKNa7pWIhW1asc1ZaY0ry0AoWnbl/QyMxqBjDFXd3mJfWccYsOI/Yrx3sxbZdL+ayRlQeQuDk/M9rQkH8GN/5+GE1aN5I6eVl0F37Axc/lLuIt/AIpoTwZYAEi9j/BYGLP6sYkjUp0foz91QximOTgu8evynu+nfAv330HVkipTIGOjEZea7QNSK0Fylxs8fanHlmiqWGyfyBeoWpxGslHZVu6K9k7GC8ABEIdNRa8vlqlphPfWPCS70Lnq3LgeKOj1C3sNF9ST8g7pth/0FEZgXruzhpx/EyjsasNbdLZg3iX1QwRS0P4L341Flrztovt8npyP9ytTiukkYIQzXCX8XuWjiaUuzXiLkVazjh0Nl03ikKKu2+7nuaBB92geBjbGT76zZ6HeXBgcmC7dWn7pHhzqu+QTonZK0oCl427Fs0eXiYsILjxFFQkmk7OHXgdZF9jquNXloz5lgwY9S3xj4JyRwLN/9xfh16awxLZNEFvX10X97bXsmNMRUDrJJPkKMTSxZpvuTbd+Lx2iB++4NyGZibNa6nOWOJG9d2LwEzIcIHS0uQpEIPl7Ccz6+rmkVh9kLbB2rda2fYp9GCOcn6XbfaXZZXJM+HAQwPJgrtDiuQex0tEIcQcB9CYCN4ze9HCt1kb23TUgEDAipz/RqYP4dOCYmRZ7vaYk/irJ+iRDfnvPK0Id1TrSeo5kaVc7py2zWZRVdndpTM8RvW0SLwdldXDIv+ym/mS0L7bchoaYjoNeuTNKQ6AOoc0E7f4ySr65FUKYd2FTvIsP2Avsa3S+D0za30ensxr733l80AQlVmUPrhsgOzzjEuOW1hGlGus38X+CDDEuMSJnq3hvz/CxVtAk71Zkbyr5lc1BPi758Y4rlZFQnhaKYKv5nSFJc7GtDykv+1cwxNGC6AxGKprnYMDVxuAIFYBztFitdO5BsjWvvKzAbleszewtGfjE2NgltIJk+gQlTpWvLNxd3gvb+qHarfEv7BPnPfsKktDpEfuNMKXdJPANyACq5gXj854o/X8iO2iLm7JSdMhEQgIIyHNyLCCQdLDnqDWIfcdyIzAfRilSCwImt3CVJBGD7HoXRbwGRR3vgEBcoVPmsYzaU9vr62I=",
|
||||
"id": "75c178ee47aac3ab9e984ddb85bdf9d8c68ade0d97e9cd86bb39e3110218a589",
|
||||
"sig": "aba8382cc8d6ba6bba467109d2ddc19718732fe803d71e73fd2db62c1cbbb1b4527447240906e01755139067a71c75d8c03271826ca5d0226c818cb7fb495fe2"
|
||||
}]
|
||||
[14:13:42.083] RECV localhost:8888: ["OK", "75c178ee47aac3ab9e984ddb85bdf9d8c68ade0d97e9cd86bb39e3110218a589", true, ""]
|
||||
35
tests/ephemeral_test.sh
Executable file
@@ -0,0 +1,35 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Simplified Ephemeral Event Test
|
||||
# Tests that ephemeral events are broadcast to active subscriptions
|
||||
|
||||
echo "=== Generating Ephemeral Event (kind 20000) ==="
|
||||
event=$(nak event --kind 20000 --content "test ephemeral event")
|
||||
echo "$event"
|
||||
echo ""
|
||||
|
||||
echo "=== Testing Ephemeral Event Broadcast ==="
|
||||
subscription='["REQ","test_sub",{"kinds":[20000],"limit":10}]'
|
||||
echo "Subscription Filter:"
|
||||
echo "$subscription"
|
||||
echo ""
|
||||
|
||||
event_msg='["EVENT",'"$event"']'
|
||||
echo "Event Message:"
|
||||
echo "$event_msg"
|
||||
echo ""
|
||||
|
||||
echo "=== Relay Responses ==="
|
||||
(
|
||||
# Send subscription
|
||||
printf "%s\n" "$subscription"
|
||||
# Wait for subscription to establish
|
||||
sleep 1
|
||||
# Send ephemeral event on same connection
|
||||
printf "%s\n" "$event_msg"
|
||||
# Wait for responses
|
||||
sleep 2
|
||||
) | timeout 5 websocat ws://127.0.0.1:8888
|
||||
|
||||
echo ""
|
||||
echo "Test complete!"
|
||||
63
tests/large_event_test.sh
Executable file
@@ -0,0 +1,63 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Test script for posting large events (>4KB) to test partial write handling
|
||||
# Uses nak to properly sign events with large content
|
||||
|
||||
RELAY_URL="ws://localhost:8888"
|
||||
|
||||
# Check if nak is installed
|
||||
if ! command -v nak &> /dev/null; then
|
||||
echo "Error: nak is not installed. Install with: go install github.com/fiatjaf/nak@latest"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Generate a test private key if not set
|
||||
if [ -z "$NOSTR_PRIVATE_KEY" ]; then
|
||||
echo "Generating temporary test key..."
|
||||
export NOSTR_PRIVATE_KEY=$(nak key generate)
|
||||
fi
|
||||
|
||||
echo "=== Large Event Test ==="
|
||||
echo "Testing partial write handling with events >4KB"
|
||||
echo "Relay: $RELAY_URL"
|
||||
echo ""
|
||||
|
||||
# Test 1: 5KB event
|
||||
echo "Test 1: Posting 5KB event..."
|
||||
CONTENT_5KB=$(python3 -c "print('A' * 5000)")
|
||||
echo "$CONTENT_5KB" | nak event -k 1 --content - $RELAY_URL
|
||||
sleep 1
|
||||
|
||||
# Test 2: 10KB event
|
||||
echo ""
|
||||
echo "Test 2: Posting 10KB event..."
|
||||
CONTENT_10KB=$(python3 -c "print('B' * 10000)")
|
||||
echo "$CONTENT_10KB" | nak event -k 1 --content - $RELAY_URL
|
||||
sleep 1
|
||||
|
||||
# Test 3: 20KB event
|
||||
echo ""
|
||||
echo "Test 3: Posting 20KB event..."
|
||||
CONTENT_20KB=$(python3 -c "print('C' * 20000)")
|
||||
echo "$CONTENT_20KB" | nak event -k 1 --content - $RELAY_URL
|
||||
sleep 1
|
||||
|
||||
# Test 4: 50KB event (very large)
|
||||
echo ""
|
||||
echo "Test 4: Posting 50KB event..."
|
||||
CONTENT_50KB=$(python3 -c "print('D' * 50000)")
|
||||
echo "$CONTENT_50KB" | nak event -k 1 --content - $RELAY_URL
|
||||
|
||||
echo ""
|
||||
echo "=== Test Complete ==="
|
||||
echo ""
|
||||
echo "Check relay.log for:"
|
||||
echo " - 'Queued partial write' messages (indicates buffering is working)"
|
||||
echo " - 'write completed' messages (indicates retry succeeded)"
|
||||
echo " - No 'Invalid frame header' errors"
|
||||
echo ""
|
||||
echo "To view logs in real-time:"
|
||||
echo " tail -f relay.log | grep -E '(partial|write completed|Invalid frame)'"
|
||||
echo ""
|
||||
echo "To check if events were stored:"
|
||||
echo " sqlite3 build/*.db 'SELECT id, length(content) as content_size FROM events ORDER BY created_at DESC LIMIT 4;'"
|
||||
53
tests/post_events.sh
Executable file
@@ -0,0 +1,53 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Test script to post kind 1 events to the relay every second
|
||||
# Cycles through three different secret keys
|
||||
# Content includes current timestamp
|
||||
#
|
||||
# Usage: ./post_events.sh <relay_url>
|
||||
# Example: ./post_events.sh ws://localhost:8888
|
||||
# Example: ./post_events.sh wss://relay.laantungir.net
|
||||
|
||||
# Check if relay URL is provided
|
||||
if [ -z "$1" ]; then
|
||||
echo "Error: Relay URL is required"
|
||||
echo "Usage: $0 <relay_url>"
|
||||
echo "Example: $0 ws://localhost:8888"
|
||||
echo "Example: $0 wss://relay.laantungir.net"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Array of secret keys to cycle through
|
||||
SECRET_KEYS=(
|
||||
"3fdd8227a920c2385559400b2b14e464f22e80df312a73cc7a86e1d7e91d608f"
|
||||
"a156011cd65b71f84b4a488ac81687f2aed57e490b31c28f58195d787030db60"
|
||||
"1618aaa21f5bd45c5ffede0d9a60556db67d4a046900e5f66b0bae5c01c801fb"
|
||||
)
|
||||
|
||||
RELAY_URL="$1"
|
||||
KEY_INDEX=0
|
||||
|
||||
echo "Starting event posting test to $RELAY_URL"
|
||||
echo "Press Ctrl+C to stop"
|
||||
|
||||
while true; do
|
||||
# Get current timestamp
|
||||
TIMESTAMP=$(date +"%Y-%m-%d %H:%M:%S UTC")
|
||||
|
||||
# Get current secret key
|
||||
CURRENT_KEY=${SECRET_KEYS[$KEY_INDEX]}
|
||||
|
||||
# Create content with timestamp
|
||||
CONTENT="Test event at $TIMESTAMP"
|
||||
|
||||
echo "[$TIMESTAMP] Posting event with key ${KEY_INDEX}: ${CURRENT_KEY:0:16}..."
|
||||
|
||||
# Post event using nak
|
||||
nak event -c "$CONTENT" --sec "$CURRENT_KEY" "$RELAY_URL"
|
||||
|
||||
# Cycle to next key
|
||||
KEY_INDEX=$(( (KEY_INDEX + 1) % ${#SECRET_KEYS[@]} ))
|
||||
|
||||
# Wait 1 second
|
||||
sleep .2
|
||||
done
|
||||
@@ -1,203 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Rate Limiting Test Suite for C-Relay
|
||||
# Tests rate limiting and abuse prevention mechanisms
|
||||
|
||||
set -e
|
||||
|
||||
# Configuration
|
||||
RELAY_HOST="127.0.0.1"
|
||||
RELAY_PORT="8888"
|
||||
TEST_TIMEOUT=15
|
||||
|
||||
# Colors for output
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
BLUE='\033[0;34m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
# Test counters
|
||||
TOTAL_TESTS=0
|
||||
PASSED_TESTS=0
|
||||
FAILED_TESTS=0
|
||||
|
||||
# Function to test rate limiting
|
||||
test_rate_limiting() {
|
||||
local description="$1"
|
||||
local message="$2"
|
||||
local burst_count="${3:-10}"
|
||||
local expected_limited="${4:-false}"
|
||||
|
||||
TOTAL_TESTS=$((TOTAL_TESTS + 1))
|
||||
|
||||
echo -n "Testing $description... "
|
||||
|
||||
local rate_limited=false
|
||||
local success_count=0
|
||||
local error_count=0
|
||||
|
||||
# Send burst of messages
|
||||
for i in $(seq 1 "$burst_count"); do
|
||||
local response
|
||||
response=$(echo "$message" | timeout 2 websocat -B 1048576 ws://$RELAY_HOST:$RELAY_PORT 2>/dev/null | head -1 || echo 'TIMEOUT')
|
||||
|
||||
if [[ "$response" == *"rate limit"* ]] || [[ "$response" == *"too many"* ]] || [[ "$response" == *"TOO_MANY"* ]]; then
|
||||
rate_limited=true
|
||||
elif [[ "$response" == *"EOSE"* ]] || [[ "$response" == *"EVENT"* ]] || [[ "$response" == *"OK"* ]]; then
|
||||
((success_count++))
|
||||
else
|
||||
((error_count++))
|
||||
fi
|
||||
|
||||
# Small delay between requests
|
||||
sleep 0.05
|
||||
done
|
||||
|
||||
if [[ "$expected_limited" == "true" ]]; then
|
||||
if [[ "$rate_limited" == "true" ]]; then
|
||||
echo -e "${GREEN}PASSED${NC} - Rate limiting triggered as expected"
|
||||
PASSED_TESTS=$((PASSED_TESTS + 1))
|
||||
return 0
|
||||
else
|
||||
echo -e "${RED}FAILED${NC} - Rate limiting not triggered (expected)"
|
||||
FAILED_TESTS=$((FAILED_TESTS + 1))
|
||||
return 1
|
||||
fi
|
||||
else
|
||||
if [[ "$rate_limited" == "false" ]]; then
|
||||
echo -e "${GREEN}PASSED${NC} - No rate limiting for normal traffic"
|
||||
PASSED_TESTS=$((PASSED_TESTS + 1))
|
||||
return 0
|
||||
else
|
||||
echo -e "${YELLOW}UNCERTAIN${NC} - Unexpected rate limiting"
|
||||
PASSED_TESTS=$((PASSED_TESTS + 1)) # Count as passed since it's conservative
|
||||
return 0
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
# Function to test sustained load
|
||||
test_sustained_load() {
|
||||
local description="$1"
|
||||
local message="$2"
|
||||
local duration="${3:-10}"
|
||||
|
||||
TOTAL_TESTS=$((TOTAL_TESTS + 1))
|
||||
|
||||
echo -n "Testing $description... "
|
||||
|
||||
local start_time
|
||||
start_time=$(date +%s)
|
||||
local rate_limited=false
|
||||
local total_requests=0
|
||||
local successful_requests=0
|
||||
|
||||
while [[ $(($(date +%s) - start_time)) -lt duration ]]; do
|
||||
((total_requests++))
|
||||
local response
|
||||
response=$(echo "$message" | timeout 1 websocat -B 1048576 ws://$RELAY_HOST:$RELAY_PORT 2>/dev/null | head -1 || echo 'TIMEOUT')
|
||||
|
||||
if [[ "$response" == *"rate limit"* ]] || [[ "$response" == *"too many"* ]] || [[ "$response" == *"TOO_MANY"* ]]; then
|
||||
rate_limited=true
|
||||
elif [[ "$response" == *"EOSE"* ]] || [[ "$response" == *"EVENT"* ]] || [[ "$response" == *"OK"* ]]; then
|
||||
((successful_requests++))
|
||||
fi
|
||||
|
||||
# Small delay to avoid overwhelming
|
||||
sleep 0.1
|
||||
done
|
||||
|
||||
local success_rate=0
|
||||
if [[ $total_requests -gt 0 ]]; then
|
||||
success_rate=$((successful_requests * 100 / total_requests))
|
||||
fi
|
||||
|
||||
if [[ "$rate_limited" == "true" ]]; then
|
||||
echo -e "${GREEN}PASSED${NC} - Rate limiting activated under sustained load (${success_rate}% success rate)"
|
||||
PASSED_TESTS=$((PASSED_TESTS + 1))
|
||||
return 0
|
||||
else
|
||||
echo -e "${YELLOW}UNCERTAIN${NC} - No rate limiting detected (${success_rate}% success rate)"
|
||||
# This might be acceptable if rate limiting is very permissive
|
||||
PASSED_TESTS=$((PASSED_TESTS + 1))
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
echo "=========================================="
|
||||
echo "C-Relay Rate Limiting Test Suite"
|
||||
echo "=========================================="
|
||||
echo "Testing rate limiting against relay at ws://$RELAY_HOST:$RELAY_PORT"
|
||||
echo ""
|
||||
|
||||
# Test basic connectivity first
|
||||
echo "=== Basic Connectivity Test ==="
|
||||
test_rate_limiting "Basic connectivity" '["REQ","rate_test",{}]' 1 false
|
||||
echo ""
|
||||
|
||||
echo "=== Burst Request Testing ==="
|
||||
# Test rapid succession of requests
|
||||
test_rate_limiting "Rapid REQ messages" '["REQ","burst_req_'$(date +%s%N)'",{}]' 20 true
|
||||
test_rate_limiting "Rapid COUNT messages" '["COUNT","burst_count_'$(date +%s%N)'",{}]' 20 true
|
||||
test_rate_limiting "Rapid CLOSE messages" '["CLOSE","burst_close"]' 20 true
|
||||
echo ""
|
||||
|
||||
echo "=== Malformed Message Rate Limiting ==="
|
||||
# Test if malformed messages trigger rate limiting faster
|
||||
test_rate_limiting "Malformed JSON burst" '["REQ","malformed"' 15 true
|
||||
test_rate_limiting "Invalid message type burst" '["INVALID","test",{}]' 15 true
|
||||
test_rate_limiting "Empty message burst" '[]' 15 true
|
||||
echo ""
|
||||
|
||||
echo "=== Sustained Load Testing ==="
|
||||
# Test sustained moderate load
|
||||
test_sustained_load "Sustained REQ load" '["REQ","sustained_'$(date +%s%N)'",{}]' 10
|
||||
test_sustained_load "Sustained COUNT load" '["COUNT","sustained_count_'$(date +%s%N)'",{}]' 10
|
||||
echo ""
|
||||
|
||||
echo "=== Filter Complexity Testing ==="
|
||||
# Test if complex filters trigger rate limiting
|
||||
test_rate_limiting "Complex filter burst" '["REQ","complex_'$(date +%s%N)'",{"authors":["a","b","c"],"kinds":[1,2,3],"#e":["x","y","z"],"#p":["m","n","o"],"since":1000000000,"until":2000000000,"limit":100}]' 10 true
|
||||
echo ""
|
||||
|
||||
echo "=== Subscription Management Testing ==="
|
||||
# Test subscription creation/deletion rate limiting
|
||||
echo -n "Testing subscription churn... "
|
||||
local churn_test_passed=true
|
||||
for i in $(seq 1 25); do
|
||||
# Create subscription
|
||||
echo "[\"REQ\",\"churn_${i}_$(date +%s%N)\",{}]" | timeout 1 websocat -B 1048576 ws://$RELAY_HOST:$RELAY_PORT >/dev/null 2>&1 || true
|
||||
|
||||
# Close subscription
|
||||
echo "[\"CLOSE\",\"churn_${i}_*\"]" | timeout 1 websocat -B 1048576 ws://$RELAY_HOST:$RELAY_PORT >/dev/null 2>&1 || true
|
||||
|
||||
sleep 0.05
|
||||
done
|
||||
|
||||
# Check if relay is still responsive
|
||||
if echo 'ping' | timeout 2 websocat -n1 ws://$RELAY_HOST:$RELAY_PORT >/dev/null 2>&1; then
|
||||
echo -e "${GREEN}PASSED${NC} - Subscription churn handled"
|
||||
TOTAL_TESTS=$((TOTAL_TESTS + 1))
|
||||
PASSED_TESTS=$((PASSED_TESTS + 1))
|
||||
else
|
||||
echo -e "${RED}FAILED${NC} - Relay unresponsive after subscription churn"
|
||||
TOTAL_TESTS=$((TOTAL_TESTS + 1))
|
||||
FAILED_TESTS=$((FAILED_TESTS + 1))
|
||||
fi
|
||||
echo ""
|
||||
|
||||
echo "=== Test Results ==="
|
||||
echo "Total tests: $TOTAL_TESTS"
|
||||
echo -e "Passed: ${GREEN}$PASSED_TESTS${NC}"
|
||||
echo -e "Failed: ${RED}$FAILED_TESTS${NC}"
|
||||
|
||||
if [[ $FAILED_TESTS -eq 0 ]]; then
|
||||
echo -e "${GREEN}✓ All rate limiting tests passed!${NC}"
|
||||
echo "Rate limiting appears to be working correctly."
|
||||
exit 0
|
||||
else
|
||||
echo -e "${RED}✗ Some rate limiting tests failed!${NC}"
|
||||
echo "Rate limiting may not be properly configured."
|
||||
exit 1
|
||||
fi
|
||||
BIN
tests/sendDM
Executable file
296
tests/sendDM.c
Normal file
@@ -0,0 +1,296 @@
|
||||
/*
|
||||
* NIP-17 Private Direct Messages - Command Line Application
|
||||
*
|
||||
* This example demonstrates how to send NIP-17 private direct messages
|
||||
* using the Nostr Core Library.
|
||||
*
|
||||
* Usage:
|
||||
* ./send_nip17_dm -r <recipient> -s <sender> [-R <relay>]... <message>
|
||||
*
|
||||
* Options:
|
||||
* -r <recipient>: The recipient's public key (npub or hex)
|
||||
* -s <sender>: The sender's private key (nsec or hex)
|
||||
* -R <relay>: Relay URL to send to (can be specified multiple times)
|
||||
* <message>: The message to send (must be the last argument)
|
||||
*
|
||||
* If no relays are specified, uses default relay.
|
||||
* If no sender key is provided, uses a default test key.
|
||||
*
|
||||
* Examples:
|
||||
* ./send_nip17_dm -r npub1example... -s nsec1test... -R wss://relay1.com "Hello from NIP-17!"
|
||||
* ./send_nip17_dm -r 4f355bdcb7cc0af728ef3cceb9615d90684bb5b2ca5f859ab0f0b704075871aa -s aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa -R ws://localhost:8888 "config"
|
||||
*/
|
||||
|
||||
#define _GNU_SOURCE
|
||||
#define _POSIX_C_SOURCE 200809L
|
||||
|
||||
#include "../nostr_core_lib/nostr_core/nostr_core.h"
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <unistd.h>
|
||||
#include <getopt.h>
|
||||
|
||||
// Default test private key (for demonstration - DO NOT USE IN PRODUCTION)
|
||||
#define DEFAULT_SENDER_NSEC "nsec12kgt0dv2k2safv6s32w8f89z9uw27e68hjaa0d66c5xvk70ezpwqncd045"
|
||||
|
||||
// Default relay for sending DMs
|
||||
#define DEFAULT_RELAY "wss://relay.laantungir.net"
|
||||
|
||||
// Progress callback for publishing
|
||||
void publish_progress_callback(const char* relay_url, const char* status,
|
||||
const char* message, int success_count,
|
||||
int total_relays, int completed_relays, void* user_data) {
|
||||
(void)user_data;
|
||||
|
||||
if (relay_url) {
|
||||
printf("📡 [%s]: %s", relay_url, status);
|
||||
if (message) {
|
||||
printf(" - %s", message);
|
||||
}
|
||||
printf(" (%d/%d completed, %d successful)\n", completed_relays, total_relays, success_count);
|
||||
} else {
|
||||
printf("📡 PUBLISH COMPLETE: %d/%d successful\n", success_count, total_relays);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert npub or hex pubkey to hex format
|
||||
*/
|
||||
int convert_pubkey_to_hex(const char* input_pubkey, char* output_hex) {
|
||||
// Check if it's already hex (64 characters)
|
||||
if (strlen(input_pubkey) == 64) {
|
||||
// Assume it's already hex
|
||||
strcpy(output_hex, input_pubkey);
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Check if it's an npub (starts with "npub1")
|
||||
if (strncmp(input_pubkey, "npub1", 5) == 0) {
|
||||
// Convert npub to hex
|
||||
unsigned char pubkey_bytes[32];
|
||||
if (nostr_decode_npub(input_pubkey, pubkey_bytes) != 0) {
|
||||
fprintf(stderr, "Error: Invalid npub format\n");
|
||||
return -1;
|
||||
}
|
||||
nostr_bytes_to_hex(pubkey_bytes, 32, output_hex);
|
||||
return 0;
|
||||
}
|
||||
|
||||
fprintf(stderr, "Error: Public key must be 64-character hex or valid npub\n");
|
||||
return -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert nsec to private key bytes if needed
|
||||
*/
|
||||
int convert_nsec_to_private_key(const char* input_nsec, unsigned char* private_key) {
|
||||
// Check if it's already hex (64 characters)
|
||||
if (strlen(input_nsec) == 64) {
|
||||
// Convert hex to bytes
|
||||
if (nostr_hex_to_bytes(input_nsec, private_key, 32) != 0) {
|
||||
fprintf(stderr, "Error: Invalid hex private key\n");
|
||||
return -1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Check if it's an nsec (starts with "nsec1")
|
||||
if (strncmp(input_nsec, "nsec1", 5) == 0) {
|
||||
// Convert nsec directly to private key bytes
|
||||
if (nostr_decode_nsec(input_nsec, private_key) != 0) {
|
||||
fprintf(stderr, "Error: Invalid nsec format\n");
|
||||
return -1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
fprintf(stderr, "Error: Private key must be 64-character hex or valid nsec\n");
|
||||
return -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Main function
|
||||
*/
|
||||
int main(int argc, char* argv[]) {
|
||||
char* recipient_key = NULL;
|
||||
char* sender_key = NULL;
|
||||
char** relays = NULL;
|
||||
int relay_count = 0;
|
||||
char* message = NULL;
|
||||
|
||||
// Parse command line options
|
||||
int opt;
|
||||
while ((opt = getopt(argc, argv, "r:s:R:")) != -1) {
|
||||
switch (opt) {
|
||||
case 'r':
|
||||
recipient_key = optarg;
|
||||
break;
|
||||
case 's':
|
||||
sender_key = optarg;
|
||||
break;
|
||||
case 'R':
|
||||
relays = realloc(relays, (relay_count + 1) * sizeof(char*));
|
||||
relays[relay_count] = optarg;
|
||||
relay_count++;
|
||||
break;
|
||||
default:
|
||||
fprintf(stderr, "Usage: %s -r <recipient> -s <sender> [-R <relay>]... <message>\n", argv[0]);
|
||||
fprintf(stderr, "Options:\n");
|
||||
fprintf(stderr, " -r <recipient>: The recipient's public key (npub or hex)\n");
|
||||
fprintf(stderr, " -s <sender>: The sender's private key (nsec or hex)\n");
|
||||
fprintf(stderr, " -R <relay>: Relay URL to send to (can be specified multiple times)\n");
|
||||
fprintf(stderr, " <message>: The message to send (must be the last argument)\n");
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
// Check for required arguments
|
||||
if (!recipient_key) {
|
||||
fprintf(stderr, "Error: Recipient key (-r) is required\n");
|
||||
return 1;
|
||||
}
|
||||
|
||||
// Get message from remaining arguments
|
||||
if (optind >= argc) {
|
||||
fprintf(stderr, "Error: Message is required\n");
|
||||
return 1;
|
||||
}
|
||||
message = argv[optind];
|
||||
|
||||
// Use default values if not provided
|
||||
if (!sender_key) {
|
||||
sender_key = DEFAULT_SENDER_NSEC;
|
||||
}
|
||||
if (relay_count == 0) {
|
||||
relays = malloc(sizeof(char*));
|
||||
relays[0] = DEFAULT_RELAY;
|
||||
relay_count = 1;
|
||||
}
|
||||
|
||||
printf("🧪 NIP-17 Private Direct Message Sender\n");
|
||||
printf("======================================\n\n");
|
||||
|
||||
// Initialize crypto
|
||||
if (nostr_init() != NOSTR_SUCCESS) {
|
||||
fprintf(stderr, "Failed to initialize crypto\n");
|
||||
free(relays);
|
||||
return 1;
|
||||
}
|
||||
|
||||
// Convert recipient pubkey
|
||||
char recipient_pubkey_hex[65];
|
||||
if (convert_pubkey_to_hex(recipient_key, recipient_pubkey_hex) != 0) {
|
||||
free(relays);
|
||||
return 1;
|
||||
}
|
||||
|
||||
// Convert sender private key
|
||||
unsigned char sender_privkey[32];
|
||||
if (convert_nsec_to_private_key(sender_key, sender_privkey) != 0) {
|
||||
free(relays);
|
||||
return 1;
|
||||
}
|
||||
|
||||
// Derive sender public key for display
|
||||
unsigned char sender_pubkey_bytes[32];
|
||||
char sender_pubkey_hex[65];
|
||||
if (nostr_ec_public_key_from_private_key(sender_privkey, sender_pubkey_bytes) != 0) {
|
||||
fprintf(stderr, "Failed to derive sender public key\n");
|
||||
return 1;
|
||||
}
|
||||
nostr_bytes_to_hex(sender_pubkey_bytes, 32, sender_pubkey_hex);
|
||||
|
||||
printf("📤 Sender: %s\n", sender_pubkey_hex);
|
||||
printf("📥 Recipient: %s\n", recipient_pubkey_hex);
|
||||
printf("💬 Message: %s\n", message);
|
||||
printf("🌐 Relays: ");
|
||||
for (int i = 0; i < relay_count; i++) {
|
||||
printf("%s", relays[i]);
|
||||
if (i < relay_count - 1) printf(", ");
|
||||
}
|
||||
printf("\n\n");
|
||||
|
||||
// Create DM event
|
||||
printf("💬 Creating DM event...\n");
|
||||
const char* recipient_pubkeys[] = {recipient_pubkey_hex};
|
||||
cJSON* dm_event = nostr_nip17_create_chat_event(
|
||||
message,
|
||||
recipient_pubkeys,
|
||||
1,
|
||||
"NIP-17 CLI", // subject
|
||||
NULL, // no reply
|
||||
relays[0], // relay hint (use first relay)
|
||||
sender_pubkey_hex
|
||||
);
|
||||
|
||||
if (!dm_event) {
|
||||
fprintf(stderr, "Failed to create DM event\n");
|
||||
return 1;
|
||||
}
|
||||
|
||||
printf("✅ Created DM event (kind 14)\n");
|
||||
|
||||
// Send DM (create gift wraps)
|
||||
printf("🎁 Creating gift wraps...\n");
|
||||
cJSON* gift_wraps[10]; // Max 10 gift wraps
|
||||
int gift_wrap_count = nostr_nip17_send_dm(
|
||||
dm_event,
|
||||
recipient_pubkeys,
|
||||
1,
|
||||
sender_privkey,
|
||||
gift_wraps,
|
||||
10
|
||||
);
|
||||
|
||||
cJSON_Delete(dm_event); // Original DM event no longer needed
|
||||
|
||||
if (gift_wrap_count <= 0) {
|
||||
fprintf(stderr, "Failed to create gift wraps\n");
|
||||
return 1;
|
||||
}
|
||||
|
||||
printf("✅ Created %d gift wrap(s)\n", gift_wrap_count);
|
||||
|
||||
// Publish the gift wrap to relays
|
||||
printf("\n📤 Publishing gift wrap to %d relay(s)...\n", relay_count);
|
||||
|
||||
int success_count = 0;
|
||||
publish_result_t* publish_results = synchronous_publish_event_with_progress(
|
||||
(const char**)relays,
|
||||
relay_count,
|
||||
gift_wraps[0], // Send the first gift wrap
|
||||
&success_count,
|
||||
10, // 10 second timeout
|
||||
publish_progress_callback,
|
||||
NULL, // no user data
|
||||
0, // NIP-42 disabled
|
||||
NULL // no private key for auth
|
||||
);
|
||||
|
||||
if (!publish_results || success_count == 0) {
|
||||
fprintf(stderr, "\n❌ Failed to publish gift wrap to any relay (success_count: %d/%d)\n", success_count, relay_count);
|
||||
// Clean up gift wraps
|
||||
for (int i = 0; i < gift_wrap_count; i++) {
|
||||
cJSON_Delete(gift_wraps[i]);
|
||||
}
|
||||
if (publish_results) free(publish_results);
|
||||
free(relays);
|
||||
return 1;
|
||||
}
|
||||
|
||||
printf("\n✅ Successfully published NIP-17 DM to %d/%d relay(s)!\n", success_count, relay_count);
|
||||
|
||||
// Clean up
|
||||
free(publish_results);
|
||||
for (int i = 0; i < gift_wrap_count; i++) {
|
||||
cJSON_Delete(gift_wraps[i]);
|
||||
}
|
||||
free(relays);
|
||||
|
||||
nostr_cleanup();
|
||||
|
||||
printf("\n🎉 DM sent successfully! The recipient can now decrypt it using their private key.\n");
|
||||
|
||||
return 0;
|
||||
}
|
||||
448
tests/sql_test.sh
Executable file
@@ -0,0 +1,448 @@
|
||||
#!/bin/bash
|
||||
|
||||
# SQL Query Admin API Test Script
|
||||
# Tests the sql_query command functionality
|
||||
|
||||
set -e
|
||||
|
||||
# Configuration
|
||||
RELAY_URL="ws://localhost:8888"
|
||||
ADMIN_PRIVKEY="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
|
||||
ADMIN_PUBKEY="6a04ab98d9e4774ad806e302dddeb63bea16b5cb5f223ee77478e861bb583eb3"
|
||||
RELAY_PUBKEY="4f355bdcb7cc0af728ef3cceb9615d90684bb5b2ca5f859ab0f0b704075871aa"
|
||||
|
||||
# Colors for output
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
BLUE='\033[0;34m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
# Test counters
|
||||
TOTAL_TESTS=0
|
||||
PASSED_TESTS=0
|
||||
FAILED_TESTS=0
|
||||
|
||||
# Helper functions
|
||||
print_test() {
    # Announce the named test case and bump the global test counter.
    printf '%b\n' "${YELLOW}TEST: $1${NC}"
    TOTAL_TESTS=$(( TOTAL_TESTS + 1 ))
}
|
||||
|
||||
print_pass() {
    # Report a passing check and record it in the pass counter.
    printf '%b\n' "${GREEN}✓ PASS: $1${NC}"
    PASSED_TESTS=$(( PASSED_TESTS + 1 ))
}
|
||||
|
||||
print_fail() {
    # Report a failing check and record it in the failure counter.
    printf '%b\n' "${RED}✗ FAIL: $1${NC}"
    FAILED_TESTS=$(( FAILED_TESTS + 1 ))
}
|
||||
|
||||
# Check if nak is installed
|
||||
check_nak() {
    # Abort early unless the 'nak' CLI (used to sign/encrypt admin events)
    # is available on PATH.
    if command -v nak >/dev/null 2>&1; then
        printf '%b\n' "${GREEN}✓ nak is available${NC}"
        return
    fi
    printf '%b\n' "${RED}ERROR: nak command not found. Please install nak first.${NC}"
    printf '%b\n' "${RED}Visit: https://github.com/fiatjaf/nak${NC}"
    exit 1
}
|
||||
|
||||
# Send SQL query command via WebSocket using nak
|
||||
send_sql_query() {
    # Encrypt a SQL admin command, wrap it in a kind-23456 admin event, and
    # send it to the relay over WebSocket.
    #
    # Arguments:
    #   $1 - SQL query string to run on the relay
    #   $2 - human-readable description used in progress output
    # Outputs:
    #   stdout - ONLY the relay response (or the TIMEOUT marker), so callers
    #            capturing with $(...) get a clean value to grep
    #   stderr - progress and debug diagnostics (previously these went to
    #            stdout and polluted the captured response)
    # Returns: 0 on success, 1 on encryption/event-creation/send failure
    local query="$1"
    local description="$2"
    local command encrypted_command admin_event response

    echo -n "Testing $description... " >&2

    # Admin command payload: ["sql_query", "<query>"]
    command="[\"sql_query\", \"$query\"]"

    # Encrypt the command for the relay (NIP-44) so only it can read the query.
    # '|| true' keeps set -e from aborting; the emptiness check below handles failure.
    encrypted_command=$(nak encrypt "$command" \
        --sec "$ADMIN_PRIVKEY" \
        --recipient-pubkey "$RELAY_PUBKEY" 2>/dev/null) || true

    if [ -z "$encrypted_command" ]; then
        echo -e "${RED}FAILED${NC} - Failed to encrypt admin command" >&2
        return 1
    fi

    # Build the signed admin event addressed to the relay's pubkey.
    admin_event=$(nak event \
        --kind 23456 \
        --content "$encrypted_command" \
        --sec "$ADMIN_PRIVKEY" \
        --tag "p=$RELAY_PUBKEY" 2>/dev/null) || true

    if [ -z "$admin_event" ]; then
        echo -e "${RED}FAILED${NC} - Failed to create admin event" >&2
        return 1
    fi

    {
        echo "=== SENT EVENT ==="
        echo "$admin_event"
        echo "==================="
    } >&2

    # Send the event and read up to 3 reply frames; mark timeouts explicitly.
    response=$(echo "$admin_event" | timeout 10 websocat -B 1048576 "$RELAY_URL" 2>/dev/null | head -3 || echo 'TIMEOUT')

    {
        echo "=== RECEIVED RESPONSE ==="
        echo "$response"
        echo "=========================="
    } >&2

    if [[ "$response" == *"TIMEOUT"* ]]; then
        echo -e "${RED}FAILED${NC} - Connection timeout" >&2
        # Still emit the response so $(...) callers can match on TIMEOUT.
        echo "$response"
        return 1
    fi

    echo "$response" # The relay response is the function's only stdout
}
|
||||
|
||||
# Test functions
|
||||
test_valid_select() {
    # A plain SELECT should be accepted and return a sql_query result payload.
    print_test "Valid SELECT query"
    local response=$(send_sql_query "SELECT * FROM events LIMIT 1" "valid SELECT query")

    case "$response" in
        *TIMEOUT*)
            FAILED_TESTS=$((FAILED_TESTS + 1))
            return 1
            ;;
    esac

    if grep -q '"query_type":"sql_query"' <<<"$response" && grep -q '"row_count"' <<<"$response"; then
        print_pass "Valid SELECT accepted and executed"
    else
        print_fail "Valid SELECT failed: $response"
    fi
}
|
||||
|
||||
test_select_count() {
    # Aggregate SELECTs (COUNT) must be treated as read-only and executed.
    print_test "SELECT COUNT(*) query"
    local response=$(send_sql_query "SELECT COUNT(*) FROM events" "COUNT query")

    case "$response" in
        *TIMEOUT*)
            FAILED_TESTS=$((FAILED_TESTS + 1))
            return 1
            ;;
    esac

    if grep -q '"query_type":"sql_query"' <<<"$response" && grep -q '"row_count"' <<<"$response"; then
        print_pass "COUNT query executed successfully"
    else
        print_fail "COUNT query failed: $response"
    fi
}
|
||||
|
||||
test_blocked_insert() {
    # Write statements must be rejected: INSERT should yield a
    # blocked_statement error.
    print_test "INSERT statement blocked"
    local response=$(send_sql_query "INSERT INTO events VALUES ('id', 'pubkey', 1234567890, 1, 'content', 'sig')" "INSERT blocking")

    case "$response" in
        *TIMEOUT*)
            FAILED_TESTS=$((FAILED_TESTS + 1))
            return 1
            ;;
    esac

    if grep -q '"status":"error"' <<<"$response" && grep -q '"error_type":"blocked_statement"' <<<"$response"; then
        print_pass "INSERT correctly blocked"
    else
        print_fail "INSERT not blocked: $response"
    fi
}
|
||||
|
||||
test_blocked_update() {
    # UPDATE is a write statement and must come back as blocked_statement.
    print_test "UPDATE statement blocked"
    local response=$(send_sql_query "UPDATE events SET content = 'test' WHERE id = 'abc123'" "UPDATE blocking")

    case "$response" in
        *TIMEOUT*)
            FAILED_TESTS=$((FAILED_TESTS + 1))
            return 1
            ;;
    esac

    if grep -q '"status":"error"' <<<"$response" && grep -q '"error_type":"blocked_statement"' <<<"$response"; then
        print_pass "UPDATE correctly blocked"
    else
        print_fail "UPDATE not blocked: $response"
    fi
}
|
||||
|
||||
test_blocked_delete() {
    # DELETE is a write statement and must come back as blocked_statement.
    print_test "DELETE statement blocked"
    local response=$(send_sql_query "DELETE FROM events WHERE id = 'abc123'" "DELETE blocking")

    case "$response" in
        *TIMEOUT*)
            FAILED_TESTS=$((FAILED_TESTS + 1))
            return 1
            ;;
    esac

    if grep -q '"status":"error"' <<<"$response" && grep -q '"error_type":"blocked_statement"' <<<"$response"; then
        print_pass "DELETE correctly blocked"
    else
        print_fail "DELETE not blocked: $response"
    fi
}
|
||||
|
||||
test_blocked_drop() {
    # Schema-destroying DDL (DROP) must be rejected as blocked_statement.
    print_test "DROP statement blocked"
    local response=$(send_sql_query "DROP TABLE events" "DROP blocking")

    case "$response" in
        *TIMEOUT*)
            FAILED_TESTS=$((FAILED_TESTS + 1))
            return 1
            ;;
    esac

    if grep -q '"status":"error"' <<<"$response" && grep -q '"error_type":"blocked_statement"' <<<"$response"; then
        print_pass "DROP correctly blocked"
    else
        print_fail "DROP not blocked: $response"
    fi
}
|
||||
|
||||
test_blocked_create() {
    # Schema-creating DDL (CREATE) must be rejected as blocked_statement.
    print_test "CREATE statement blocked"
    local response=$(send_sql_query "CREATE TABLE test (id TEXT)" "CREATE blocking")

    case "$response" in
        *TIMEOUT*)
            FAILED_TESTS=$((FAILED_TESTS + 1))
            return 1
            ;;
    esac

    if grep -q '"status":"error"' <<<"$response" && grep -q '"error_type":"blocked_statement"' <<<"$response"; then
        print_pass "CREATE correctly blocked"
    else
        print_fail "CREATE not blocked: $response"
    fi
}
|
||||
|
||||
test_blocked_alter() {
    # Schema-altering DDL (ALTER) must be rejected as blocked_statement.
    print_test "ALTER statement blocked"
    local response=$(send_sql_query "ALTER TABLE events ADD COLUMN test TEXT" "ALTER blocking")

    case "$response" in
        *TIMEOUT*)
            FAILED_TESTS=$((FAILED_TESTS + 1))
            return 1
            ;;
    esac

    if grep -q '"status":"error"' <<<"$response" && grep -q '"error_type":"blocked_statement"' <<<"$response"; then
        print_pass "ALTER correctly blocked"
    else
        print_fail "ALTER not blocked: $response"
    fi
}
|
||||
|
||||
test_blocked_pragma() {
    # PRAGMA can change engine state, so it must be rejected as well.
    print_test "PRAGMA statement blocked"
    local response=$(send_sql_query "PRAGMA table_info(events)" "PRAGMA blocking")

    case "$response" in
        *TIMEOUT*)
            FAILED_TESTS=$((FAILED_TESTS + 1))
            return 1
            ;;
    esac

    if grep -q '"status":"error"' <<<"$response" && grep -q '"error_type":"blocked_statement"' <<<"$response"; then
        print_pass "PRAGMA correctly blocked"
    else
        print_fail "PRAGMA not blocked: $response"
    fi
}
|
||||
|
||||
test_select_with_where() {
    # SELECT with a WHERE filter should execute like any other read query.
    print_test "SELECT with WHERE clause"
    local response=$(send_sql_query "SELECT id, kind FROM events WHERE kind = 1 LIMIT 5" "WHERE clause query")

    case "$response" in
        *TIMEOUT*)
            FAILED_TESTS=$((FAILED_TESTS + 1))
            return 1
            ;;
    esac

    if grep -q '"query_type":"sql_query"' <<<"$response"; then
        print_pass "WHERE clause query executed"
    else
        print_fail "WHERE clause query failed: $response"
    fi
}
|
||||
|
||||
test_select_with_join() {
    # Multi-table read queries (LEFT JOIN) should be permitted.
    print_test "SELECT with JOIN"
    local response=$(send_sql_query "SELECT e.id, e.kind, s.events_sent FROM events e LEFT JOIN active_subscriptions_log s ON e.id = s.subscription_id LIMIT 3" "JOIN query")

    case "$response" in
        *TIMEOUT*)
            FAILED_TESTS=$((FAILED_TESTS + 1))
            return 1
            ;;
    esac

    if grep -q '"query_type":"sql_query"' <<<"$response"; then
        print_pass "JOIN query executed"
    else
        print_fail "JOIN query failed: $response"
    fi
}
|
||||
|
||||
test_select_views() {
    # Reading from database views should work the same as tables.
    print_test "SELECT from views"
    local response=$(send_sql_query "SELECT * FROM event_kinds_view LIMIT 5" "view query")

    case "$response" in
        *TIMEOUT*)
            FAILED_TESTS=$((FAILED_TESTS + 1))
            return 1
            ;;
    esac

    if grep -q '"query_type":"sql_query"' <<<"$response"; then
        print_pass "View query executed"
    else
        print_fail "View query failed: $response"
    fi
}
|
||||
|
||||
test_nonexistent_table() {
    # Querying a missing table should surface a structured error response.
    print_test "Query nonexistent table"
    local response=$(send_sql_query "SELECT * FROM nonexistent_table" "nonexistent table")

    case "$response" in
        *TIMEOUT*)
            FAILED_TESTS=$((FAILED_TESTS + 1))
            return 1
            ;;
    esac

    if grep -q '"status":"error"' <<<"$response"; then
        print_pass "Nonexistent table error handled correctly"
    else
        print_fail "Nonexistent table error not handled: $response"
    fi
}
|
||||
|
||||
test_invalid_syntax() {
    # Malformed SQL should produce a structured error, not a crash or hang.
    print_test "Invalid SQL syntax"
    local response=$(send_sql_query "SELECT * FROM events WHERE" "invalid syntax")

    case "$response" in
        *TIMEOUT*)
            FAILED_TESTS=$((FAILED_TESTS + 1))
            return 1
            ;;
    esac

    if grep -q '"status":"error"' <<<"$response"; then
        print_pass "Invalid syntax error handled"
    else
        print_fail "Invalid syntax not handled: $response"
    fi
}
|
||||
|
||||
test_request_id_correlation() {
    # Responses must echo a request_id so clients can correlate replies.
    print_test "Request ID correlation"
    local response=$(send_sql_query "SELECT * FROM events LIMIT 1" "request ID correlation")

    case "$response" in
        *TIMEOUT*)
            FAILED_TESTS=$((FAILED_TESTS + 1))
            return 1
            ;;
    esac

    if grep -q '"request_id"' <<<"$response"; then
        print_pass "Request ID included in response"
    else
        print_fail "Request ID missing from response: $response"
    fi
}
|
||||
|
||||
test_response_format() {
    # A successful response must carry every expected top-level field.
    print_test "Response format validation"
    local response=$(send_sql_query "SELECT * FROM events LIMIT 1" "response format")

    case "$response" in
        *TIMEOUT*)
            FAILED_TESTS=$((FAILED_TESTS + 1))
            return 1
            ;;
    esac

    # All of these markers must be present for the format to be valid.
    local ok=1 marker
    for marker in '"query_type":"sql_query"' '"timestamp"' '"execution_time_ms"' '"row_count"' '"columns"' '"rows"'; do
        grep -q -- "$marker" <<<"$response" || { ok=0; break; }
    done

    if [ "$ok" -eq 1 ]; then
        print_pass "Response format is valid"
    else
        print_fail "Response format invalid: $response"
    fi
}
|
||||
|
||||
test_empty_result() {
    # A query matching zero rows should still return a well-formed result.
    print_test "Empty result set"
    local response=$(send_sql_query "SELECT * FROM events WHERE kind = 99999" "empty result")

    case "$response" in
        *TIMEOUT*)
            FAILED_TESTS=$((FAILED_TESTS + 1))
            return 1
            ;;
    esac

    if grep -q '"query_type":"sql_query"' <<<"$response"; then
        print_pass "Empty result handled correctly"
    else
        print_fail "Empty result not handled: $response"
    fi
}
|
||||
|
||||
# ---------------------------------------------------------------------------
# Main driver: connectivity check, then the validation / execution / error /
# format suites, then a pass/fail summary.
# NOTE(review): the script runs under `set -e`, so a test function that
# `return`s non-zero (e.g. the TIMEOUT path) aborts the whole suite here
# instead of continuing to the summary — confirm fail-fast is intended.
# ---------------------------------------------------------------------------
echo "=========================================="
echo "C-Relay SQL Query Admin API Testing Suite"
echo "=========================================="
echo "Testing SQL query functionality at $RELAY_URL"
echo ""

# Check prerequisites
check_nak

# Test basic connectivity first
echo "=== Basic Connectivity Test ==="
print_test "Basic connectivity"
response=$(send_sql_query "SELECT 1" "basic connectivity")

if [[ "$response" == *"TIMEOUT"* ]]; then
    echo -e "${RED}FAILED${NC} - Cannot connect to relay at $RELAY_URL"
    echo "Make sure the relay is running and accessible."
    exit 1
else
    print_pass "Relay connection established"
fi
echo ""

# Run test suites
echo "=== Query Validation Tests ==="
test_valid_select
test_select_count
test_blocked_insert
test_blocked_update
test_blocked_delete
test_blocked_drop
test_blocked_create
test_blocked_alter
test_blocked_pragma
echo ""

echo "=== Query Execution Tests ==="
test_select_with_where
test_select_with_join
test_select_views
test_empty_result
echo ""

echo "=== Error Handling Tests ==="
test_nonexistent_table
test_invalid_syntax
echo ""

echo "=== Response Format Tests ==="
test_request_id_correlation
test_response_format
echo ""

# Summary and exit code: non-zero when any test failed.
echo "=== Test Results ==="
echo "Total tests: $TOTAL_TESTS"
echo -e "Passed: ${GREEN}$PASSED_TESTS${NC}"
echo -e "Failed: ${RED}$FAILED_TESTS${NC}"

if [[ $FAILED_TESTS -eq 0 ]]; then
    echo -e "${GREEN}✓ All SQL query tests passed!${NC}"
    echo "SQL query admin API is working correctly."
    exit 0
else
    echo -e "${RED}✗ Some SQL query tests failed!${NC}"
    echo "SQL query admin API may have issues."
    exit 1
fi
|
||||
295
tests/subscription_cleanup_test.sh
Executable file
@@ -0,0 +1,295 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Subscription Cleanup Testing Suite for C-Relay
|
||||
# Tests startup cleanup and connection age limit features
|
||||
|
||||
set -e
|
||||
|
||||
# Load test keys
|
||||
source "$(dirname "$0")/.test_keys.txt"
|
||||
|
||||
# Configuration
|
||||
RELAY_HOST="127.0.0.1"
|
||||
RELAY_PORT="8888"
|
||||
RELAY_URL="ws://${RELAY_HOST}:${RELAY_PORT}"
|
||||
|
||||
# Colors for output
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
BLUE='\033[0;34m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
# Test counters
|
||||
TOTAL_TESTS=0
|
||||
PASSED_TESTS=0
|
||||
FAILED_TESTS=0
|
||||
|
||||
echo -e "${BLUE}========================================${NC}"
|
||||
echo -e "${BLUE}Subscription Cleanup Test Suite${NC}"
|
||||
echo -e "${BLUE}========================================${NC}"
|
||||
echo ""
|
||||
|
||||
# Function to print test header
|
||||
print_test_header() {
    # Print a colored banner for test number $1 with title $2.
    printf '%b\n' "${BLUE}=== Test $1: $2 ===${NC}"
}
|
||||
|
||||
# Function to print test result
|
||||
print_result() {
    # Record one test outcome. PASS and WARN both count toward the pass
    # total (warnings are informational); only FAIL increments failures.
    local outcome=$1
    local detail=$2

    TOTAL_TESTS=$((TOTAL_TESTS + 1))

    case "$outcome" in
        PASS)
            printf '%b\n' "${GREEN}[PASS]${NC} $detail"
            PASSED_TESTS=$((PASSED_TESTS + 1))
            ;;
        FAIL)
            printf '%b\n' "${RED}[FAIL]${NC} $detail"
            FAILED_TESTS=$((FAILED_TESTS + 1))
            ;;
        *)
            printf '%b\n' "${YELLOW}[WARN]${NC} $detail"
            PASSED_TESTS=$((PASSED_TESTS + 1))
            ;;
    esac
}
|
||||
|
||||
# Function to check if relay is running
|
||||
check_relay_running() {
    # Probe the relay with an empty REQ; treat an EOSE or EVENT reply as alive.
    # (local+assignment on one line deliberately masks websocat's exit status.)
    local reply=$(echo '["REQ","ping",{}]' | timeout 2 websocat -n1 "$RELAY_URL" 2>/dev/null)
    grep -q -e "EOSE" -e "EVENT" <<<"$reply"
}
|
||||
|
||||
# Function to create a subscription
|
||||
create_subscription() {
    # Open subscription $1 with optional filter $2 (defaults to {}).
    # Prints the relay's first reply frame, or TIMEOUT when nothing arrives.
    local sub_id=$1
    local filter=${2:-"{}"}

    printf '["REQ","%s",%s]\n' "$sub_id" "$filter" | timeout 5 websocat -n1 "$RELAY_URL" 2>/dev/null || echo "TIMEOUT"
}
|
||||
|
||||
# Function to close a subscription
|
||||
close_subscription() {
    # Send a CLOSE frame for subscription $1; prints the reply or TIMEOUT.
    # NOTE(review): not referenced by the visible test flow — confirm it is
    # still needed.
    local sub_id=$1

    printf '["CLOSE","%s"]\n' "$sub_id" | timeout 5 websocat -n1 "$RELAY_URL" 2>/dev/null || echo "TIMEOUT"
}
|
||||
|
||||
# Function to query subscription count from database
|
||||
get_subscription_count() {
    # Print the number of subscription rows still marked open in the first
    # *.db file found under the current directory. Prints 0 when no database
    # exists or the sqlite3 query fails.
    local db_file=$(find . -name "*.db" -type f 2>/dev/null | head -1)

    if [ -n "$db_file" ]; then
        sqlite3 "$db_file" "SELECT COUNT(*) FROM subscriptions WHERE event_type='created' AND ended_at IS NULL;" 2>/dev/null || echo "0"
    else
        echo "0"
    fi
}
|
||||
|
||||
# ---------------------------------------------------------------------------
# Main test flow: eight sequential scenarios exercising subscription cleanup.
# NOTE(review): Test 4 blocks on an interactive `read` (manual relay
# restart), so this script cannot run unattended/in CI as written.
# ---------------------------------------------------------------------------
# Test 1: Basic Connectivity
print_test_header "1" "Basic Connectivity"

if check_relay_running; then
    print_result "PASS" "Relay is running and accepting connections"
else
    print_result "FAIL" "Cannot connect to relay at $RELAY_URL"
    echo ""
    echo -e "${RED}ERROR: Relay must be running for tests to proceed${NC}"
    exit 1
fi
echo ""

# Test 2: Create Multiple Subscriptions
print_test_header "2" "Create Multiple Subscriptions"

echo "[INFO] Creating 5 test subscriptions..."
for i in {1..5}; do
    response=$(create_subscription "cleanup_test_$i")
    if echo "$response" | grep -q "EOSE"; then
        echo "[INFO] Subscription cleanup_test_$i created successfully"
    else
        print_result "WARN" "Subscription cleanup_test_$i may not have been created: $response"
    fi
done

# Give subscriptions time to be logged
sleep 2

# Check subscription count in database
active_subs=$(get_subscription_count)
echo "[INFO] Active subscriptions in database: $active_subs"

if [ "$active_subs" -ge 5 ]; then
    print_result "PASS" "Multiple subscriptions created and logged ($active_subs active)"
else
    print_result "WARN" "Expected at least 5 subscriptions, found $active_subs"
fi
echo ""

# Test 3: Simulate Orphaned Subscriptions (disconnect without CLOSE)
print_test_header "3" "Simulate Orphaned Subscriptions"

echo "[INFO] Creating subscriptions and disconnecting abruptly..."

# Create subscriptions in background and kill the connection
# NOTE(review): kill -9 targets the backgrounded subshell; whether websocat
# itself dies with it depends on the shell's process handling — confirm the
# connection is actually dropped without a CLOSE.
for i in {6..10}; do
    (echo "[\"REQ\",\"orphan_test_$i\",{}]" | timeout 2 websocat "$RELAY_URL" &>/dev/null) &
    pid=$!
    sleep 0.5
    kill -9 $pid 2>/dev/null || true
done

sleep 2

orphaned_subs=$(get_subscription_count)
echo "[INFO] Subscriptions after abrupt disconnects: $orphaned_subs"

if [ "$orphaned_subs" -gt "$active_subs" ]; then
    print_result "PASS" "Orphaned subscriptions detected ($orphaned_subs total, was $active_subs)"
else
    print_result "WARN" "No increase in orphaned subscriptions detected"
fi
echo ""

# Test 4: Startup Cleanup (requires relay restart)
print_test_header "4" "Startup Cleanup Feature"

echo "[INFO] This test requires relay restart to verify startup cleanup"
echo "[INFO] Current orphaned subscriptions: $orphaned_subs"
echo ""
echo -e "${YELLOW}[ACTION REQUIRED]${NC} Please restart the relay now with:"
echo "  ./make_and_restart_relay.sh"
echo ""
echo -n "Press Enter after relay has restarted to continue..."
read

# Wait for relay to be ready
echo "[INFO] Waiting for relay to be ready..."
sleep 3

if ! check_relay_running; then
    print_result "FAIL" "Relay not responding after restart"
    exit 1
fi

# Check if orphaned subscriptions were cleaned up
cleaned_subs=$(get_subscription_count)
echo "[INFO] Active subscriptions after restart: $cleaned_subs"

if [ "$cleaned_subs" -eq 0 ]; then
    print_result "PASS" "Startup cleanup removed all orphaned subscriptions"
elif [ "$cleaned_subs" -lt "$orphaned_subs" ]; then
    print_result "PASS" "Startup cleanup reduced orphaned subscriptions (from $orphaned_subs to $cleaned_subs)"
else
    print_result "FAIL" "Startup cleanup did not reduce orphaned subscriptions"
fi
echo ""

# Test 5: Connection Age Limit (requires configuration)
print_test_header "5" "Connection Age Limit Feature"

echo "[INFO] Testing connection age limit feature..."
echo "[INFO] Default max_connection_seconds is 86400 (24 hours)"
echo ""
echo -e "${YELLOW}[INFO]${NC} To test connection age limit with shorter timeout:"
echo "  1. Set max_connection_seconds to 60 (1 minute) via admin event"
echo "  2. Create a subscription and wait 61 seconds"
echo "  3. Connection should be automatically closed"
echo ""
echo "[INFO] For this test, we'll verify the feature is enabled in config"

# Create a test subscription to verify connection works
# NOTE(review): this only confirms a subscription can be created; it does not
# actually exercise the age limit.
response=$(create_subscription "age_test_1")
if echo "$response" | grep -q "EOSE"; then
    print_result "PASS" "Connection age limit feature is operational (subscription created)"
else
    print_result "WARN" "Could not verify connection age limit feature"
fi
echo ""

# Test 6: Verify Client Reconnection
print_test_header "6" "Client Reconnection After Cleanup"

echo "[INFO] Testing that clients can reconnect after cleanup..."

# Create a subscription
response=$(create_subscription "reconnect_test_1")
if echo "$response" | grep -q "EOSE"; then
    echo "[INFO] First connection successful"

    # Close and reconnect
    sleep 1
    response=$(create_subscription "reconnect_test_2")
    if echo "$response" | grep -q "EOSE"; then
        print_result "PASS" "Client can reconnect and create new subscriptions"
    else
        print_result "FAIL" "Client reconnection failed"
    fi
else
    print_result "FAIL" "Initial connection failed"
fi
echo ""

# Test 7: Verify Disabled State (max_connection_seconds = 0)
print_test_header "7" "Verify Feature Can Be Disabled"

echo "[INFO] Connection age limit can be disabled by setting max_connection_seconds=0"
echo "[INFO] When disabled, connections remain open indefinitely"
echo "[INFO] This is the recommended setting for most relays"

# Create a long-lived subscription
response=$(create_subscription "disabled_test_1")
if echo "$response" | grep -q "EOSE"; then
    print_result "PASS" "Subscriptions work normally when feature is disabled/default"
else
    print_result "WARN" "Could not verify disabled state"
fi
echo ""

# Test 8: Database Integrity Check
print_test_header "8" "Database Integrity After Cleanup"

echo "[INFO] Checking database integrity..."

db_file=$(find . -name "*.db" -type f 2>/dev/null | head -1)
if [ -n "$db_file" ]; then
    # Check if database is accessible
    if sqlite3 "$db_file" "PRAGMA integrity_check;" 2>/dev/null | grep -q "ok"; then
        print_result "PASS" "Database integrity check passed"
    else
        print_result "FAIL" "Database integrity check failed"
    fi

    # Check subscription table structure
    if sqlite3 "$db_file" "SELECT COUNT(*) FROM subscriptions;" &>/dev/null; then
        print_result "PASS" "Subscription table is accessible"
    else
        print_result "FAIL" "Subscription table is not accessible"
    fi
else
    print_result "WARN" "No database file found"
fi
echo ""

# Final Summary
echo -e "${BLUE}========================================${NC}"
echo -e "${BLUE}Test Summary${NC}"
echo -e "${BLUE}========================================${NC}"
echo "Total Tests: $TOTAL_TESTS"
echo -e "${GREEN}Passed: $PASSED_TESTS${NC}"
echo -e "${RED}Failed: $FAILED_TESTS${NC}"
echo ""

if [ $FAILED_TESTS -eq 0 ]; then
    echo -e "${GREEN}All tests passed!${NC}"
    exit 0
else
    echo -e "${RED}Some tests failed. Please review the output above.${NC}"
    exit 1
fi
|
||||
747
tests/test_requests.mjs
Normal file
@@ -0,0 +1,747 @@
|
||||
|
||||
/**
|
||||
* Nostr Relay Pubkey Filter Test
|
||||
* Tests how many pubkeys different relays can handle in a single filter request
|
||||
*/
|
||||
|
||||
import { WebSocket } from 'ws';
|
||||
|
||||
// Configuration
|
||||
const RELAYS = [
|
||||
// "wss://relay.laantungir.net"
|
||||
"ws://127.0.0.1:8888"
|
||||
];
|
||||
|
||||
// Test parameters
|
||||
const STEP_SIZE = 25; // Increment pubkey count by 25 each test
|
||||
const MAX_PUBKEYS = 500; // Maximum pubkeys to test
|
||||
const EVENT_KIND = 1; // Kind 1 = text notes
|
||||
const EVENT_LIMIT = 2; // Only request 2 events per test
|
||||
|
||||
// Generate test pubkey arrays of increasing sizes
|
||||
// Build prefixes of PUBKEYS of growing length: STEP_SIZE, 2*STEP_SIZE, …
// up to MAX_PUBKEYS, one array per test round.
function generateTestPubkeyArrays() {
    const sizes = [];
    for (let n = STEP_SIZE; n <= MAX_PUBKEYS; n += STEP_SIZE) {
        sizes.push(n);
    }
    return sizes.map((n) => PUBKEYS.slice(0, n));
}
|
||||
|
||||
const PUBKEYS = [
|
||||
"85080d3bad70ccdcd7f74c29a44f55bb85cbcd3dd0cbb957da1d215bdb931204",
|
||||
"82341f882b6eabcd2ba7f1ef90aad961cf074af15b9ef44a09f9d2a8fbfbe6a2",
|
||||
"916b7aca250f43b9f842faccc831db4d155088632a8c27c0d140f2043331ba57",
|
||||
"2645caf5706a31767c921532975a079f85950e1006bd5065f5dd0213e6848a96",
|
||||
"8fe3f243e91121818107875d51bca4f3fcf543437aa9715150ec8036358939c5",
|
||||
"83e818dfbeccea56b0f551576b3fd39a7a50e1d8159343500368fa085ccd964b",
|
||||
"a341f45ff9758f570a21b000c17d4e53a3a497c8397f26c0e6d61e5acffc7a98",
|
||||
"e88a691e98d9987c964521dff60025f60700378a4879180dcbbb4a5027850411",
|
||||
"2cde0e02bda47eaeeed65e341619cc5f2afce990164669da4e1e5989180a96b9",
|
||||
"edcd20558f17d99327d841e4582f9b006331ac4010806efa020ef0d40078e6da",
|
||||
"34d2f5274f1958fcd2cb2463dabeaddf8a21f84ace4241da888023bf05cc8095",
|
||||
"c48b5cced5ada74db078df6b00fa53fc1139d73bf0ed16de325d52220211dbd5",
|
||||
"04c915daefee38317fa734444acee390a8269fe5810b2241e5e6dd343dfbecc9",
|
||||
"e33fe65f1fde44c6dc17eeb38fdad0fceaf1cae8722084332ed1e32496291d42",
|
||||
"1306edd66f1da374adc417cf884bbcff57c6399656236c1f872ee10403c01b2d",
|
||||
"eaf27aa104833bcd16f671488b01d65f6da30163b5848aea99677cc947dd00aa",
|
||||
"472f440f29ef996e92a186b8d320ff180c855903882e59d50de1b8bd5669301e",
|
||||
"be1d89794bf92de5dd64c1e60f6a2c70c140abac9932418fee30c5c637fe9479",
|
||||
"c49d52a573366792b9a6e4851587c28042fb24fa5625c6d67b8c95c8751aca15",
|
||||
"c037a6897df86bfd4df5496ca7e2318992b4766897fb18fbd1d347a4f4459f5e",
|
||||
"e41e883f1ef62485a074c1a1fa1d0a092a5d678ad49bedc2f955ab5e305ba94e",
|
||||
"020f2d21ae09bf35fcdfb65decf1478b846f5f728ab30c5eaabcd6d081a81c3e",
|
||||
"e2f28c1ac6dff5a7b755635af4c8436d2fec89b888a9d9548a51b2c63f779555",
|
||||
"29fbc05acee671fb579182ca33b0e41b455bb1f9564b90a3d8f2f39dee3f2779",
|
||||
"090254801a7e8e5085b02e711622f0dfa1a85503493af246aa42af08f5e4d2df",
|
||||
"3bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d",
|
||||
"6b0d4c8d9dc59e110d380b0429a02891f1341a0fa2ba1b1cf83a3db4d47e3964",
|
||||
"b0b8fbd9578ac23e782d97a32b7b3a72cda0760761359bd65661d42752b4090a",
|
||||
"b7996c183e036df27802945b80bbdc8b0bf5971b6621a86bf3569c332117f07d",
|
||||
"1833ee04459feb2ca4ae690d5f31269ad488c69e5fe903a42b532c677c4a8170",
|
||||
"4adb4ff2dc72bbf1f6da19fc109008a25013c837cf712016972fad015b19513f",
|
||||
"c4eabae1be3cf657bc1855ee05e69de9f059cb7a059227168b80b89761cbc4e0",
|
||||
"368f4e0027fd223fdb69b6ec6e1c06d1f027a611b1ed38eeb32493eb2878bb35",
|
||||
"703e26b4f8bc0fa57f99d815dbb75b086012acc24fc557befa310f5aa08d1898",
|
||||
"50d94fc2d8580c682b071a542f8b1e31a200b0508bab95a33bef0855df281d63",
|
||||
"6e1534f56fc9e937e06237c8ba4b5662bcacc4e1a3cfab9c16d89390bec4fca3",
|
||||
"a5e93aef8e820cbc7ab7b6205f854b87aed4b48c5f6b30fbbeba5c99e40dcf3f",
|
||||
"1989034e56b8f606c724f45a12ce84a11841621aaf7182a1f6564380b9c4276b",
|
||||
"19fefd7f39c96d2ff76f87f7627ae79145bc971d8ab23205005939a5a913bc2f",
|
||||
"6e468422dfb74a5738702a8823b9b28168abab8655faacb6853cd0ee15deee93",
|
||||
"a3b0ce5d70d0db22885706b2b1f144c6864a7e4828acff3f8f01ca6b3f54ad15",
|
||||
"aef0d6b212827f3ba1de6189613e6d4824f181f567b1205273c16895fdaf0b23",
|
||||
"826e9f895b81ab41a4522268b249e68d02ca81608def562a493cee35ffc5c759",
|
||||
"460c25e682fda7832b52d1f22d3d22b3176d972f60dcdc3212ed8c92ef85065c",
|
||||
"e1055729d51e037b3c14e8c56e2c79c22183385d94aadb32e5dc88092cd0fef4",
|
||||
"27f211f4542fd89d673cfad15b6d838cc5d525615aae8695ed1dcebc39b2dadb",
|
||||
"eab0e756d32b80bcd464f3d844b8040303075a13eabc3599a762c9ac7ab91f4f",
|
||||
"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245",
|
||||
"63fe6318dc58583cfe16810f86dd09e18bfd76aabc24a0081ce2856f330504ed",
|
||||
"00000000827ffaa94bfea288c3dfce4422c794fbb96625b6b31e9049f729d700",
|
||||
"7fa56f5d6962ab1e3cd424e758c3002b8665f7b0d8dcee9fe9e288d7751ac194",
|
||||
"22aa81510ee63fe2b16cae16e0921f78e9ba9882e2868e7e63ad6d08ae9b5954",
|
||||
"175f568d77fb0cb7400f0ddd8aed1738cd797532b314ef053a1669d4dba7433a",
|
||||
"6c535d95a8659b234d5a0805034f5f0a67e3c0ceffcc459f61f680fe944424bf",
|
||||
"9579444852221038dcba34512257b66a1c6e5bdb4339b6794826d4024b3e4ce9",
|
||||
"58c741aa630c2da35a56a77c1d05381908bd10504fdd2d8b43f725efa6d23196",
|
||||
"b8e6bf46e109314616fe24e6c7e265791a5f2f4ec95ae8aa15d7107ad250dc63",
|
||||
"84dee6e676e5bb67b4ad4e042cf70cbd8681155db535942fcc6a0533858a7240",
|
||||
"387519cafd325668ecffe59577f37238638da4cf2d985b82f932fc81d33da1e8",
|
||||
"b9e76546ba06456ed301d9e52bc49fa48e70a6bf2282be7a1ae72947612023dc",
|
||||
"4d62dd5e6ac55ae2405940f59f6f030a994ec2b3ecc5556c8dc542cce20e46dd",
|
||||
"9c612f8b770f0e3fd35cdac2bc57fcee8561e560504ea25c8b9eff8e03512b3e",
|
||||
"3eeb3de14ec5c48c6c4c9ff80908c4186170eabb74b2a6705a7db9f9922cd61e",
|
||||
"51d7f1b736d1958fa56f113e82a27987a3aca4f0e6d237fa8fc369cc1608c5c0",
|
||||
"c2622c916d9b90e10a81b2ba67b19bdfc5d6be26c25756d1f990d3785ce1361b",
|
||||
"b111d517452f9ef015e16d60ae623a6b66af63024eec941b0653bfee0dd667d4",
|
||||
"d897efcd971f8e5eae08c86b7da66f89b30e761a4a86ac41d907425d15b630fe",
|
||||
"9dea27855974a08fceb48c40fab8432c1a8e3a53a1da22a1ad568595d6010649",
|
||||
"47b630bbcdfa88b1c85f84aa3b68fe6c0102b651ba5d9a23cbd2d07b4f6eecc1",
|
||||
"eb0dc09a61fdfc0df5db1f20c7fc7d83f00c690580fea2e5bac8f99c13f65065",
|
||||
"5c0775b1ae0a5140da9599aa9cd1c5beea55c2d55a5d681808525eb5fce37b32",
|
||||
"b474e6999980aa9e8c9dd6e3720fb03136bfa05aba5fab1634dc0bd8767d412f",
|
||||
"759f7abf05ca710bf2c8da7ad7a9a7df6d0c85db7b2217da524e94e3627b2fbd",
|
||||
"060e7c6ed0dbeb9c8cdc61445ee38b9b08d899d6b617e28064b0916e243ddddb",
|
||||
"f728d9e6e7048358e70930f5ca64b097770d989ccd86854fe618eda9c8a38106",
|
||||
"bf2376e17ba4ec269d10fcc996a4746b451152be9031fa48e74553dde5526bce",
|
||||
"b99dbca0184a32ce55904cb267b22e434823c97f418f36daf5d2dff0dd7b5c27",
|
||||
"c7dccba4fe4426a7b1ea239a5637ba40fab9862c8c86b3330fe65e9f667435f6",
|
||||
"ad46db12ee250a108756ab4f0f3007b04d7e699f45eac3ab696077296219d207",
|
||||
"59fbee7369df7713dbbfa9bbdb0892c62eba929232615c6ff2787da384cb770f",
|
||||
"d7f0e3917c466f1e2233e9624fbd6d4bd1392dbcfcaf3574f457569d496cb731",
|
||||
"e9e4276490374a0daf7759fd5f475deff6ffb9b0fc5fa98c902b5f4b2fe3bba2",
|
||||
"6f35047caf7432fc0ab54a28fed6c82e7b58230bf98302bf18350ff71e10430a",
|
||||
"fdd5e8f6ae0db817be0b71da20498c1806968d8a6459559c249f322fa73464a7",
|
||||
"883fea4c071fda4406d2b66be21cb1edaf45a3e058050d6201ecf1d3596bbc39",
|
||||
"a1808558470389142e297d4729e081ab8bdff1ab50d0ebe22ffa78958f7a6ab7",
|
||||
"330fb1431ff9d8c250706bbcdc016d5495a3f744e047a408173e92ae7ee42dac",
|
||||
"a4cb51f4618cfcd16b2d3171c466179bed8e197c43b8598823b04de266cef110",
|
||||
"9c163c7351f8832b08b56cbb2e095960d1c5060dd6b0e461e813f0f07459119e",
|
||||
"0a722ca20e1ccff0adfdc8c2abb097957f0e0bf32db18c4281f031756d50eb8d",
|
||||
"5cad82c898ee66013711945d687f7d9549f645a0118467dae2f5e274c598d6ff",
|
||||
"03b593ef3d95102b54bdff77728131a7c3bdfe9007b0b92cd7c4ad4a0021de25",
|
||||
"d0debf9fb12def81f43d7c69429bb784812ac1e4d2d53a202db6aac7ea4b466c",
|
||||
"60d53675f07dee9e7d77910efa44682d87cb532313ba66b8f4449d649172296b",
|
||||
"d3ab33199eb48c6f785072b4a66a8e57814e35d31375cca8c3ceeecc171f30ba",
|
||||
"772bd267dffbff318d1a89f257c3371410111a8b89571dbbefa77af6bfa179f3",
|
||||
"11b9a89404dbf3034e7e1886ba9dc4c6d376f239a118271bd2ec567a889850ce",
|
||||
"0497384b57b43c107a778870462901bf68e0e8583b32e2816563543c059784a4",
|
||||
"5d9ba2c5ee0e86e2c4477b145eb301f2df06063a19f4c4ab9042bd347188ec8e",
|
||||
"5683ffc7ff8a732565135aad56cdff94ebacd9a616d1313aea8ad48a446bfe99",
|
||||
"3004d45a0ab6352c61a62586a57c50f11591416c29db1143367a4f0623b491ca",
|
||||
"b24e32ee9a1c18f2771b53345ed8dbc55b59cbe958e5a165dc01704c3aaa6196",
|
||||
"0a2df905acd5b5be3214a84cb2d4f61b0efb4d9bf05739d51112252504959688",
|
||||
"95361a2b42a26c22bac3b6b6ba4c5cac4d36906eb0cfb98268681c45a301c518",
|
||||
"b07d216f2f0422ec0252dd81a6513b8d0b0c7ef85291fbf5a85ef23f8df78fa7",
|
||||
"064de2497ce621aee2a5b4b926a08b1ca01bce9da85b0c714e883e119375140c",
|
||||
"5a8e581f16a012e24d2a640152ad562058cb065e1df28e907c1bfa82c150c8ba",
|
||||
"a36bdc7952e973b31cb32d4ce3ce21447db66c3149c1b7a3d2450f77f9c7e8f9",
|
||||
"e03cfe011d81424bb60a12e9eb0cb0c9c688c34712c3794c0752e0718b369ef2",
|
||||
"2edbcea694d164629854a52583458fd6d965b161e3c48b57d3aff01940558884",
|
||||
"b9003833fabff271d0782e030be61b7ec38ce7d45a1b9a869fbdb34b9e2d2000",
|
||||
"4379e76bfa76a80b8db9ea759211d90bb3e67b2202f8880cc4f5ffe2065061ad",
|
||||
"76c71aae3a491f1d9eec47cba17e229cda4113a0bbb6e6ae1776d7643e29cafa",
|
||||
"d307643547703537dfdef811c3dea96f1f9e84c8249e200353425924a9908cf8",
|
||||
"da0cc82154bdf4ce8bf417eaa2d2fa99aa65c96c77867d6656fccdbf8e781b18",
|
||||
"3511ad63cd9ad760780044b7c815ee55e8e00722b5de271c47ff29367653456c",
|
||||
"f9acb0b034c4c1177e985f14639f317ef0fedee7657c060b146ee790024317ec",
|
||||
"0c371f5ed95076613443e8331c4b60828ed67bcdefaa1698fb5ce9d7b3285ffb",
|
||||
"ee11a5dff40c19a555f41fe42b48f00e618c91225622ae37b6c2bb67b76c4e49",
|
||||
"053935081a69624466034446eda3374d905652ddbf8217c88708182687a33066",
|
||||
"a305cc8926861bdde5c71bbb6fd394bb4cea6ef5f5f86402b249fc5ceb0ce220",
|
||||
"03a6e50be223dbb49e282764388f6f2ca8826eae8c5a427aa82bb1b61e51d5e6",
|
||||
"a197639863cf175adc96348382a73b4a4a361c6b2e6fc1de61a14244a2f926a1",
|
||||
"3ca7ca157b5975ace02225caf99fdce43f11207c072cb4899c80a414a9c7539d",
|
||||
"02d9f5676fffc339ffe94dfab38bebe21ce117c6f1509d9922a82d454f420da2",
|
||||
"08634a74c9d14479b462389b307695815f9a189e8fb6e058b92e18bd3f537405",
|
||||
"ec7de4aa8758ba9e09a8c89d2757a1fa0e2cc61c20b757af52ae058931c1a33f",
|
||||
"2250f69694c2a43929e77e5de0f6a61ae5e37a1ee6d6a3baef1706ed9901248b",
|
||||
"a9434ee165ed01b286becfc2771ef1705d3537d051b387288898cc00d5c885be",
|
||||
"bd625f1b8c49a79f075f3ebd2d111ff625504cf2ad12442fd70d191dd2f4a562",
|
||||
"25e5c82273a271cb1a840d0060391a0bf4965cafeb029d5ab55350b418953fbb",
|
||||
"42224859763652914db53052103f0b744df79dfc4efef7e950fc0802fc3df3c5",
|
||||
"11d0b66747887ba9a6d34b23eb31287374b45b1a1b161eac54cb183c53e00ef7",
|
||||
"2544cfcb89d7c2f8d3a31ea2ed386ac5189a18f484672436580eec215f9b039c",
|
||||
"d4338b7c3306491cfdf54914d1a52b80a965685f7361311eae5f3eaff1d23a5b",
|
||||
"c43e382ee4835010b9fad18e0a6f50f1ae143b98e089b8bb974232fce4d1f295",
|
||||
"92de68b21302fa2137b1cbba7259b8ba967b535a05c6d2b0847d9f35ff3cf56a",
|
||||
"55f04590674f3648f4cdc9dc8ce32da2a282074cd0b020596ee033d12d385185",
|
||||
"2af01e0d6bd1b9fbb9e3d43157d64590fb27dcfbcabe28784a5832e17befb87b",
|
||||
"35b23cd02d2d75e55cee38fdee26bc82f1d15d3c9580800b04b0da2edb7517ea",
|
||||
"7e0c255fd3d0f9b48789a944baf19bf42c205a9c55199805eb13573b32137488",
|
||||
"ee0e01eb17fc6cb4cd2d9300d2f8945b51056514f156c6bc6d491b74496d161a",
|
||||
"cbc5ef6b01cbd1ffa2cb95a954f04c385a936c1a86e1bb9ccdf2cf0f4ebeaccb",
|
||||
"ec6e36d5c9eb874f1db4253ef02377f7cc70697cda40fbfb24ded6b5d14cce4c",
|
||||
"2779f3d9f42c7dee17f0e6bcdcf89a8f9d592d19e3b1bbd27ef1cffd1a7f98d1",
|
||||
"976713246c36db1a4364f917b98633cbe0805d46af880f6b50a505d4eb32ed47",
|
||||
"8766a54ef9a170b3860bc66fd655abb24b5fda75d7d7ff362f44442fbdeb47b9",
|
||||
"ea2e3c814d08a378f8a5b8faecb2884d05855975c5ca4b5c25e2d6f936286f14",
|
||||
"e2ccf7cf20403f3f2a4a55b328f0de3be38558a7d5f33632fdaaefc726c1c8eb",
|
||||
"07eced8b63b883cedbd8520bdb3303bf9c2b37c2c7921ca5c59f64e0f79ad2a6",
|
||||
"532d830dffe09c13e75e8b145c825718fc12b0003f61d61e9077721c7fff93cb",
|
||||
"1afe0c74e3d7784eba93a5e3fa554a6eeb01928d12739ae8ba4832786808e36d",
|
||||
"c708943ea349519dcf56b2a5c138fd9ed064ad65ddecae6394eabd87a62f1770",
|
||||
"ccaa58e37c99c85bc5e754028a718bd46485e5d3cb3345691ecab83c755d48cc",
|
||||
"5b0e8da6fdfba663038690b37d216d8345a623cc33e111afd0f738ed7792bc54",
|
||||
"f2c96c97f6419a538f84cf3fa72e2194605e1848096e6e5170cce5b76799d400",
|
||||
"aa55a479ad6934d0fd78f3dbd88515cd1ca0d7a110812e711380d59df7598935",
|
||||
"bd9eb657c25b4f6cda68871ce26259d1f9bc62420487e3224905b674a710a45a",
|
||||
"69a80567e79b6b9bc7282ad595512df0b804784616bedb623c122fad420a2635",
|
||||
"fa984bd7dbb282f07e16e7ae87b26a2a7b9b90b7246a44771f0cf5ae58018f52",
|
||||
"df173277182f3155d37b330211ba1de4a81500c02d195e964f91be774ec96708",
|
||||
"675b84fe75e216ab947c7438ee519ca7775376ddf05dadfba6278bd012e1d728",
|
||||
"91c9a5e1a9744114c6fe2d61ae4de82629eaaa0fb52f48288093c7e7e036f832",
|
||||
"24ebb6b58d0b984a965b76f82ce9eff8795cc95085a4d09dedc56949ed596ada",
|
||||
"bb1cf5250435ff475cd8b32acb23e3ee7bbe8fc38f6951704b4798513947672c",
|
||||
"922945779f93fd0b3759f1157e3d9fa20f3fd24c4b8f2bcf520cacf649af776d",
|
||||
"3d842afecd5e293f28b6627933704a3fb8ce153aa91d790ab11f6a752d44a42d",
|
||||
"e8d67c435a4a59304e1414280e952efe17be4254fca27916bf63f9f73e54aba4",
|
||||
"c1fc7771f5fa418fd3ac49221a18f19b42ccb7a663da8f04cbbf6c08c80d20b1",
|
||||
"8eee8f5a002e533e9f9ffef14c713da449c23f56f4415e7995552075a02d1d37",
|
||||
"c998a5739f04f7fff202c54962aa5782b34ecb10d6f915bdfdd7582963bf9171",
|
||||
"a536ab1f7f3c0133baadbdf472b1ac7ad4b774ed432c1989284193572788bca0",
|
||||
"c9b19ffcd43e6a5f23b3d27106ce19e4ad2df89ba1031dd4617f1b591e108965",
|
||||
"e6ee5b449c220defea6373b8a7e147cabd67c2bdb5016886bf6096a3c7435a61",
|
||||
"a619cf1a888a73211bbf32e0c438319f23e91444d45d7bc88816ed5fcb7e8fa3",
|
||||
"56a6b75373c8f7b93c53bcae86d8ffbaba9f2a1b38122054fcdb7f3bf645b727",
|
||||
"89bfe407c647eb1888871f756516bb1906254fba3132d516ce9099614e37d10c",
|
||||
"b7ed68b062de6b4a12e51fd5285c1e1e0ed0e5128cda93ab11b4150b55ed32fc",
|
||||
"4657dfe8965be8980a93072bcfb5e59a65124406db0f819215ee78ba47934b3e",
|
||||
"d61f3bc5b3eb4400efdae6169a5c17cabf3246b514361de939ce4a1a0da6ef4a",
|
||||
"58ead82fa15b550094f7f5fe4804e0fe75b779dbef2e9b20511eccd69e6d08f9",
|
||||
"fcf6fee0e959c7195dadc5f36fe5a873003b389e7033293b06057c821fcbc9c5",
|
||||
"6681268ace4748d41a4cfcc1e64006fb935bbc359782b3d9611f64d51c6752d9",
|
||||
"e76450df94f84c1c0b71677a45d75b7918f0b786113c2d038e6ab8841b99f276",
|
||||
"a44dbc9aaa357176a7d4f5c3106846ea096b66de0b50ee39aff54baab6c4bf4b",
|
||||
"281e109d2a2899bb0555cf0c3a69b24b3debd61885ca29ef39b95b632be25fe7",
|
||||
"5be6446aa8a31c11b3b453bf8dafc9b346ff328d1fa11a0fa02a1e6461f6a9b1",
|
||||
"e1ff3bfdd4e40315959b08b4fcc8245eaa514637e1d4ec2ae166b743341be1af",
|
||||
"0d6c8388dcb049b8dd4fc8d3d8c3bb93de3da90ba828e4f09c8ad0f346488a33",
|
||||
"9be0be0e64d38a29a9cec9a5c8ef5d873c2bfa5362a4b558da5ff69bc3cbb81e",
|
||||
"c48e29f04b482cc01ca1f9ef8c86ef8318c059e0e9353235162f080f26e14c11",
|
||||
"4c7f826edf647462f744b3f16d485f53c797eabdb21cc8a7bb0713283b88e629",
|
||||
"d1621db4d91b23180707b8d4eb9b599fa8ec1dfc2453793a1f83878bd4bbc9d8",
|
||||
"4b74667f89358cd582ad82b16a2d24d5bfcb89ac4b1347ee80e5674a13ba78b2",
|
||||
"83d999a148625c3d2bb819af3064c0f6a12d7da88f68b2c69221f3a746171d19",
|
||||
"b6494a74d18a2dfa3f80ced9fadae35807716fce1071e4de19e2d746b6d87606",
|
||||
"b9c411db4036219e3dfcbe28d60e550b46cce86260fcf2c65d281258e437556f",
|
||||
"2590201e2919a8aa6568c88900192aa54ef00e6c0974a5b0432f52614a841ec8",
|
||||
"c15a5a65986e7ab4134dee3ab85254da5c5d4b04e78b4f16c82837192d355185",
|
||||
"dab6c6065c439b9bafb0b0f1ff5a0c68273bce5c1959a4158ad6a70851f507b6",
|
||||
"baf27a4cc4da49913e7fdecc951fd3b971c9279959af62b02b761a043c33384c",
|
||||
"6c237d8b3b120251c38c230c06d9e48f0d3017657c5b65c8c36112eb15c52aeb",
|
||||
"f173040998481bcb2534a53433eafb8d6ea4c7b0e1fc64572830471fe43fc77d",
|
||||
"36732cc35fe56185af1b11160a393d6c73a1fe41ddf1184c10394c28ca5d627b",
|
||||
"126103bfddc8df256b6e0abfd7f3797c80dcc4ea88f7c2f87dd4104220b4d65f",
|
||||
"457e17b7ea97a845a0d1fa8feda9976596678e3a8af46dc6671d40e050ce857d",
|
||||
"1739d937dc8c0c7370aa27585938c119e25c41f6c441a5d34c6d38503e3136ef",
|
||||
"b676ded7c768d66a757aa3967b1243d90bf57afb09d1044d3219d8d424e4aea0",
|
||||
"33bd77e5394520747faae1394a4af5fa47f404389676375b6dc7be865ed81452",
|
||||
"fe7f6bc6f7338b76bbf80db402ade65953e20b2f23e66e898204b63cc42539a3",
|
||||
"4f83ef69228b3e09b0abc11ded9e6b85319c0b7fef1a044b8ee9970e38441817",
|
||||
"4523be58d395b1b196a9b8c82b038b6895cb02b683d0c253a955068dba1facd0",
|
||||
"d8f38b894b42f7008305cebf17b48925654f22b180c5861b81141f80ccf72848",
|
||||
"9c557e253213c127a86e333ff01c9f12f63091957efafd878d220a0e2cb1193e",
|
||||
"9eab64e92219ccedb15ea9b75ababaa4ae831451019394e0e3336390c3a742d8",
|
||||
"43dedbafef3748c3f9146b961c9b87a3f5cdb1ccb50b4f5890e408702a27a506",
|
||||
"17717ad4d20e2a425cda0a2195624a0a4a73c4f6975f16b1593fc87fa46f2d58",
|
||||
"ee6ea13ab9fe5c4a68eaf9b1a34fe014a66b40117c50ee2a614f4cda959b6e74",
|
||||
"4d023ce9dfd75a7f3075b8e8e084008be17a1f750c63b5de721e6ef883adc765",
|
||||
"d91191e30e00444b942c0e82cad470b32af171764c2275bee0bd99377efd4075",
|
||||
"4eb88310d6b4ed95c6d66a395b3d3cf559b85faec8f7691dafd405a92e055d6d",
|
||||
"0aeb0814c99a13df50643ca27b831a92aaae6366f54e9c276166347aa037d63a",
|
||||
"16f1a0100d4cfffbcc4230e8e0e4290cc5849c1adc64d6653fda07c031b1074b",
|
||||
"148d1366a5e4672b1321adf00321778f86a2371a4bdbe99133f28df0b3d32fa1",
|
||||
"7076f6592de184f9e912c617c46e5e83bad91d3b7f88b7b54cc73bf8ca493321",
|
||||
"dea51494fec5947d27ca659b73dd281ff5bdba3f89f5da1977a731ad0c22e725",
|
||||
"aa97e3d392b97c21327081cdb3cb674dfa8c9c663493db43799a4079555ad1b1",
|
||||
"b7dfbebf760efb2c756b3cae22baafdbbdf55abb782f85e93b4db804d5cba7e3",
|
||||
"ca696d7edb4a86be7c7d9872bd2f9a44440cf8e2de7853536cbb3a60ae89641f",
|
||||
"ff27d01cb1e56fb58580306c7ba76bb037bf211c5b573c56e4e70ca858755af0",
|
||||
"58dece9ff68d021afe76d549b9e48e6cb7b06a5c14cdf45332c8ed7321d6f211",
|
||||
"78688c1f371e7b923d95368c9298cca06c1ec0a89ea897aa181bd60091121fea",
|
||||
"9e8dd91d21e867dec464868a8d1f4a27c0e113c53e32f2bec0a7c6e25ad2e9d5",
|
||||
"6d028aa49aa1f584b3d35aee9fcee8e3c0d81108114289aa046a7969b21eb5f5",
|
||||
"2c470abbac95a49cd0ed5b3b9e628ffda1dbb03c14caba1a225a9b8bf1dc9d5f",
|
||||
"9d7af6946b320b3ba6b4d386de2b2cf3f8ac52fdcb63f3343d1a8362693a3ce5",
|
||||
"d7a4345c3ead1ea7a34bd6aae43c63cbd81941d9ba019fe972843e5ce78e3187",
|
||||
"00f471f6312ce408f7eb56592a2b6c6b5f54ac2967c77f4c1498903b598e1b16",
|
||||
"41e9e2c8398583b90204f8e35c2a4c036aeebac6d05dbdc3e7fb44a1d6bd65a2",
|
||||
"0d978064b9054c023111926050d983573dac2aff16bb8a7497fde8ad725357c0",
|
||||
"507f5bf8367c1883f115ddf9ee95f79ea693c720eb5a5a8718443a99fa308954",
|
||||
"9a090f86adf9fbdc37de2a14745e73d6e1fd096d3da0670b6795ce5ad3cfeea3",
|
||||
"f2b7c5787424c8f9cf6c4480eb99f4a3770cc06337a4f0d1b109ba849b464193",
|
||||
"6ffe93bb72d4ac788fd8be2dadf5bb4a2f14a330d530b0e68bd733c9744c6619",
|
||||
"3ebc74907d1f928f209ef210e872cac033eaf3ff89e6853286d45d91e351ef9e",
|
||||
"da56c54b5e6749d65ad038c196478794af94e4fa5a4efdc20b49981e4ec566c3",
|
||||
"266815e0c9210dfa324c6cba3573b14bee49da4209a9456f9484e5106cd408a5",
|
||||
"5b705e6cb602425c019202dd070a0c009b040ac19960eeef2d8a8fab25c1efe5",
|
||||
"9ec7a778167afb1d30c4833de9322da0c08ba71a69e1911d5578d3144bb56437",
|
||||
"e771af0b05c8e95fcdf6feb3500544d2fb1ccd384788e9f490bb3ee28e8ed66f",
|
||||
"40b9c85fffeafc1cadf8c30a4e5c88660ff6e4971a0dc723d5ab674b5e61b451",
|
||||
"efe5d120df0cc290fa748727fb45ac487caad346d4f2293ab069e8f01fc51981",
|
||||
"408f636bd26fcc5f29889033b447cb2411f60ab1b8a5fc8cb3842dab758fdeb5",
|
||||
"d8a2c33f2e2ff3a9d4ff2a5593f3d5a59e9167fa5ded063d0e49891776611e0c",
|
||||
"02a11d1545114ab63c29958093c91b9f88618e56fee037b9d2fabcff32f62ea9",
|
||||
"1bbd7fdf68eaf5c19446c3aaf63b39dd4a8e33548bc96f6bd239a4124d8f229e",
|
||||
"726a1e261cc6474674e8285e3951b3bb139be9a773d1acf49dc868db861a1c11",
|
||||
"167e7fe01a76b6bec9d2a9b196b18c72e150e985fbeb46ee651869e7b4032785",
|
||||
"c88f94f0a391b9aaa1ffefd645253b1a968b0a422a876ea48920a95d45c33f47",
|
||||
"cd169bd8fbd5179e2a8d498ffc31d3ae0e40825ff2b8a85ea359c4455a107ca8",
|
||||
"fd38f135ef675eac5e93d5b2a738c41777c250188031caf1dcf07b1687a1fe49",
|
||||
"6b4c612991132cf4c6c390dceaae75041b9954ba4f9c465aca70beb350815a57",
|
||||
"8fec426247845bdd26f36ae4f737508c15dbec07d43ce18f8c136ab9e35ac212",
|
||||
"af551accea482000bdccb34bd3c521558e1f353773a3caed83a147921c369ea1",
|
||||
"a664a4973cd23e9f3b35a62429f7671aba2c2ae68c03313913b5c2d77269d906",
|
||||
"18f54af1e10c5bb7a35468b0f62b295d12347903c9f95738d065c84bef1402ef",
|
||||
"be39043cc12efbddfee564d95da751a71df6c139e2def45c431cadeb4a573ca3",
|
||||
"01ddee289b1a2e90874ca3428a7a414764a6cad1abfaa985c201e7aada16d38c",
|
||||
"da25cf7b457bddb6b7bc8e1b0146c0fa85373807d6efdac955199fd01fd53c1f",
|
||||
"ec380784d96b93d404166a6bf1a778227a94a02bdf499f94b5f48f61f5b1350f",
|
||||
"6538925ebfb661f418d8c7d074bee2e8afd778701dd89070c2da936d571e55c3",
|
||||
"9edd72eb23222c969379d90d60ec82891b7c827188bb28510a863f59cb697b0a",
|
||||
"09222857afceb23c66c99fc93d8e5ebda6d7aad901eb38af73c508f117685012",
|
||||
"744ecc9a119a92da88b1f448b4030cdbc2fec5c37ea06ebdd026e742b002af7f",
|
||||
"f531a8672baa2415b271e866dbe11fb08640f6c0e5d98f918bd0308e7169b5b7",
|
||||
"44dc1c2db9c3fbd7bee9257eceb52be3cf8c40baf7b63f46e56b58a131c74f0b",
|
||||
"89e14be49ed0073da83b678279cd29ba5ad86cf000b6a3d1a4c3dc4aa4fdd02c",
|
||||
"8fb140b4e8ddef97ce4b821d247278a1a4353362623f64021484b372f948000c",
|
||||
"72f9755501e1a4464f7277d86120f67e7f7ec3a84ef6813cc7606bf5e0870ff3",
|
||||
"3d99feac152027ede63326aa4f43d4ca88e4cd27296b96fe18c55d496a8f6340",
|
||||
"2540d50aeb9be889c3bd050c9cc849b57b156a2759b48084a83db59aa9056eb4",
|
||||
"b66be78da89991544a05c3a2b63da1d15eefe8e9a1bb6a4369f8616865bd6b7c",
|
||||
"2f5de0003db84ecd5449128350c66c7fb63e9d02b250d84af84f463e2f9bcef1",
|
||||
"2045369fc115b138d1438f98d3c29916986c9fde6b8203f7ff8699f0faee1c93",
|
||||
"ae1008d23930b776c18092f6eab41e4b09fcf3f03f3641b1b4e6ee3aa166d760",
|
||||
"1ec454734dcbf6fe54901ce25c0c7c6bca5edd89443416761fadc321d38df139",
|
||||
"b2d670de53b27691c0c3400225b65c35a26d06093bcc41f48ffc71e0907f9d4a",
|
||||
"ac3f6afe17593f61810513dac9a1e544e87b9ce91b27d37b88ec58fbaa9014aa",
|
||||
"d1f3c71639ae3bba17ffc6c8deb1fdb3a56506b3492213d033528cc291523704",
|
||||
"6b4a29bbd43d1d0eeead384f512dbb591ce9407d27dba48ad54b00d9d2e1972b",
|
||||
"84de08882b6e36705cf6592ee58e632dd6e092dd61c13192fc80cbbc0cbc82cc",
|
||||
"d3d74124ddfb5bdc61b8f18d17c3335bbb4f8c71182a35ee27314a49a4eb7b1d",
|
||||
"a008def15796fba9a0d6fab04e8fd57089285d9fd505da5a83fe8aad57a3564d",
|
||||
"eb7246eb8e26b0c48dd4f9c2a822a0f4d5c84138937195090932b61a2d756051",
|
||||
"683211bd155c7b764e4b99ba263a151d81209be7a566a2bb1971dc1bbd3b715e",
|
||||
"5468bceeb74ce35cb4173dcc9974bddac9e894a74bf3d44f9ca8b7554605c9ed",
|
||||
"78ce6faa72264387284e647ba6938995735ec8c7d5c5a65737e55130f026307d",
|
||||
"2754fc862d6bc0b7c3971046612d942563d181c187a391e180ed6b00f80e7e5b",
|
||||
"f1725586a402c06aec818d1478a45aaa0dc16c7a9c4869d97c350336d16f8e43",
|
||||
"6a359852238dc902aed19fbbf6a055f9abf21c1ca8915d1c4e27f50df2f290d9",
|
||||
"9e4954853fca260cecf983f098e5204c68b2bdfebde91f1f7b25c10b566d50f8",
|
||||
"3356de61b39647931ce8b2140b2bab837e0810c0ef515bbe92de0248040b8bdd",
|
||||
"3e294d2fd339bb16a5403a86e3664947dd408c4d87a0066524f8a573ae53ca8e",
|
||||
"21335073401a310cc9179fe3a77e9666710cfdf630dfd840f972c183a244b1ad",
|
||||
"987096ef8a2853fea1a31b0ed5276503da291536f167bbf7f3f991c9f05d6d7f",
|
||||
"7a78fbfec68c2b3ab6084f1f808321ba3b5ea47502c41115902013e648e76288",
|
||||
"c12a2bcb002fd74b4d342f9b942c24c44cc46d5ed39201245a8b6f4162e7efce",
|
||||
"8867bed93e89c93d0d8ac98b2443c5554799edb9190346946b12e03f13664450",
|
||||
"9b61cd02adac4b18fbcc06237e7469b07e276faf6ec4ecb34b030c2e385892a0",
|
||||
"0463223adf38df9a22a7fb07999a638fdd42d8437573e0bf19c43e013b14d673",
|
||||
"9989500413fb756d8437912cc32be0730dbe1bfc6b5d2eef759e1456c239f905",
|
||||
"17e2889fba01021d048a13fd0ba108ad31c38326295460c21e69c43fa8fbe515",
|
||||
"6c6c253fe26a5b2abf440124e35dcaa39e891cd28274431ba49da5c11d89747d",
|
||||
"9d065f84c0cba7b0ef86f5d2d155e6ce01178a8a33e194f9999b7497b1b2201b",
|
||||
"5ffb8e1b6b629c0e34a013f9298ebb0759b98a3d24029916321d5eb4255b6735",
|
||||
"3fc5f8553abd753ac47967c4c468cfd08e8cb9dee71b79e12d5adab205bc04d3",
|
||||
"ff82c8b53aa53a9705200690b91c572e2e4918f1a88de5d923ac06fa4560fa19",
|
||||
"4d4ab737e2fbb5af0fd590b4b7e8c6fe76d3a02a9791ef7fdacf601f9e50fad8",
|
||||
"5eca50a04afaefe55659fb74810b42654e2268c1acca6e53801b9862db74a83a",
|
||||
"d700fc10d457eeae4f02eb04d715a054837e68a2e2d010971382c5e1016dc99e",
|
||||
"af321973db865bb33fbc50a4de67fc0e6808d812c6e4dfd9cbc2fd50275b1dfd",
|
||||
"bbf923aa9246065f88c40c7d9bf61cccc0ff3fcff065a8cb2ff4cfbb62088f1e",
|
||||
"268b948b5aab4bab0e5430ee49e3cff11776cf183df93b32159f9670ed541495",
|
||||
"3d2e51508699f98f0f2bdbe7a45b673c687fe6420f466dc296d90b908d51d594",
|
||||
"4d4fb5ff0afb8c04e6c6e03f51281b664576f985e5bc34a3a7ee310a1e821f47",
|
||||
"9b12847f3d28bf8850ebc03f8d495a1ae8f9a2c86dbda295c90556619a3ee831",
|
||||
"733c5427f55ceba01a0f6607ab0fd11832bbb27d7db17b570e7eb7b68a081d9a",
|
||||
"4bc7982c4ee4078b2ada5340ae673f18d3b6a664b1f97e8d6799e6074cb5c39d",
|
||||
"afa0f26dbf3e674630d1cd6499e86c14f316cd4f78c6ab73bb85b00aa9c50a57",
|
||||
"c301f13372c8f0d9bc8186d874fa45fa33aede13e66f4187a3bd22ee41c95b2a",
|
||||
"548a29f145187fc97689f8ae67944627723c315c163b0dbb88842e50c681d7ca",
|
||||
"d0a1ffb8761b974cec4a3be8cbcb2e96a7090dcf465ffeac839aa4ca20c9a59e",
|
||||
"faaf47af27e3de06e83f346fc6ccea0aabfc7520d82ffe90c48dfcd740c69caa",
|
||||
"3eacaa768326d7dce80f6ee17ada199bebe7eb3c1a60b39b14e0a58bbac66fe4",
|
||||
"7f5237e9f77a22c4a89624c7ac31cae797d8ac4144b02493890d54fee7399bcd",
|
||||
"d84517802a434757c56ae8642bffb4d26e5ade0712053750215680f5896e579b",
|
||||
"bdb96ad31ac6af123c7683c55775ee2138da0f8f011e3994d56a27270e692575",
|
||||
"aab1b0caf13b9bd26a62cf8b3b20f9bfaa0e56f3ec42196a00fedf432e07d739",
|
||||
"c230edd34ca5c8318bf4592ac056cde37519d395c0904c37ea1c650b8ad4a712",
|
||||
"ce41c1698a8c042218bc586f0b9ec8d5bffa3dcbcea09bd59db9d0d92c3fc0b4",
|
||||
"b9a537523bba2fcdae857d90d8a760de4f2139c9f90d986f747ce7d0ec0d173d",
|
||||
"1a6e0aeff1dba7ba121fbeb33bf3162901495df3fcb4e4a40423e1c10edf0dca",
|
||||
"21b419102da8fc0ba90484aec934bf55b7abcf75eedb39124e8d75e491f41a5e",
|
||||
"2183e94758481d0f124fbd93c56ccaa45e7e545ceeb8d52848f98253f497b975",
|
||||
"e3f98bfb9cbeb7563a139983602e50f616cb7ebb06c3295b8ee377328f051206",
|
||||
"b5b9b84d1723994d06013606227fb5b91f9de8820b04cf848d1dccc23d054f39",
|
||||
"07adfda9c5adc80881bb2a5220f6e3181e0c043b90fa115c4f183464022968e6",
|
||||
"facdaf1ce758bdf04cdf1a1fa32a3564a608d4abc2481a286ffc178f86953ef0",
|
||||
"efc83f01c8fb309df2c8866b8c7924cc8b6f0580afdde1d6e16e2b6107c2862c",
|
||||
"52b4a076bcbbbdc3a1aefa3735816cf74993b1b8db202b01c883c58be7fad8bd",
|
||||
"c6f7077f1699d50cf92a9652bfebffac05fc6842b9ee391089d959b8ad5d48fd",
|
||||
"e7424ad457e512fdf4764a56bf6d428a06a13a1006af1fb8e0fe32f6d03265c7",
|
||||
"27797bd4e5ee52db0a197668c92b9a3e7e237e1f9fa73a10c38d731c294cfc9a",
|
||||
"7bdef7bdebb8721f77927d0e77c66059360fa62371fdf15f3add93923a613229",
|
||||
"3335d373e6c1b5bc669b4b1220c08728ea8ce622e5a7cfeeb4c0001d91ded1de",
|
||||
"645681b9d067b1a362c4bee8ddff987d2466d49905c26cb8fec5e6fb73af5c84",
|
||||
"06b7819d7f1c7f5472118266ed7bca8785dceae09e36ea3a4af665c6d1d8327c",
|
||||
"7a6b8c7de171955c214ded7e35cc782cd6dddfd141abb1929c632f69348e6f49",
|
||||
"eb882b0bb659bf72235020a0b884c4a7d817e0af3903715736b146188b1d0868",
|
||||
"2ae6c71323a225ecfa8cf655600ebbe12b1019ff36bf02726d82d095aab29729",
|
||||
"c2f85a06279a7bfa7f2477a3cee907990231a13d17b54524738504bd12e0c86c",
|
||||
"f1b911af1c7a56073e3b83ba7eaa681467040e0fbbdd265445aa80e65c274c22",
|
||||
"a54c2ae6ec6ac06b4d7b45c483eab86ac226b8ecfa99163ef7cc000da9b40895",
|
||||
"bbc73cae41502ddad7a4112586dcaf4422810d60aa4b57c637ccd1a746b07844",
|
||||
"218238431393959d6c8617a3bd899303a96609b44a644e973891038a7de8622d",
|
||||
"59cacbd83ad5c54ad91dacf51a49c06e0bef730ac0e7c235a6f6fa29b9230f02",
|
||||
"ba80990666ef0b6f4ba5059347beb13242921e54669e680064ca755256a1e3a6",
|
||||
"031db544f3158556508b321db59afd62c5bb1592021e5dfd9ff87dca0ad27d8c",
|
||||
"ee85604f8ec6e4e24f8eaf2a624d042ebd431dae448fe11779adcfb6bb78575e",
|
||||
"266ee74062e8dae0aeddfcd0f72725107598efaa80c1a7176d6ee6dd302bce4c",
|
||||
"b83a28b7e4e5d20bd960c5faeb6625f95529166b8bdb045d42634a2f35919450",
|
||||
"dbe0605a9c73172bad7523a327b236d55ea4b634e80e78a9013db791f8fd5b2c",
|
||||
"1e53e900c3bbc5ead295215efe27b2c8d5fbd15fb3dd810da3063674cb7213b2",
|
||||
"832a2b3cef4b1754c4a7572964a44db64d19edf627ec45179b519d0a5eae8199",
|
||||
"4c800257a588a82849d049817c2bdaad984b25a45ad9f6dad66e47d3b47e3b2f",
|
||||
"3743244390be53473a7e3b3b8d04dce83f6c9514b81a997fb3b123c072ef9f78",
|
||||
"f96c3d76497074c4c83a7b3823380e77dc73d5a9494fd2e053e4a1453e17824b",
|
||||
"d04ecf33a303a59852fdb681ed8b412201ba85d8d2199aec73cb62681d62aa90",
|
||||
"0cca6201658d5d98239c1511ef402562ff7d72446fb201a8d1857c39e369c9fa",
|
||||
"61066504617ee79387021e18c89fb79d1ddbc3e7bff19cf2298f40466f8715e9",
|
||||
"7adb520c3ac7cb6dc8253508df0ce1d975da49fefda9b5c956744a049d230ace",
|
||||
"7579076d9aff0a4cfdefa7e2045f2486c7e5d8bc63bfc6b45397233e1bbfcb19",
|
||||
"9ec078ef9ca31e1bdbb97175dde1cb00bf9f7225e6f622ccc8d367302e220497",
|
||||
"93e174736c4719f80627854aca8b67efd0b59558c8ece267a8eccbbd2e7c5535",
|
||||
"e62f419a0e16607b96ff10ecb00249af7d4b69c7d121e4b04130c61cc998c32e",
|
||||
"171ddd43dab1af0d1fb14029287152a4c89296890e0607cf5e7ba73c73fdf1a5",
|
||||
"604e96e099936a104883958b040b47672e0f048c98ac793f37ffe4c720279eb2",
|
||||
"7726c437ccf791f6ded97dbac1846e62019e5fbd24f42e9db2f640f231c3c09a",
|
||||
"1bf9f239dca1636149bc2f3fc334077ae959ea9607cacf945ef8f8bb227dc5e1",
|
||||
"fcd818454002a6c47a980393f0549ac6e629d28d5688114bb60d831b5c1832a7",
|
||||
"56172b53f730750b40e63c501b16068dd96a245e7f0551675c0fec9817ee96e0",
|
||||
"260d3a820b7f8de20f4972725999b1af88b0cc5554ca38f9681c8d657e043cc3",
|
||||
"9ba8c688f091ca48de2b0f9bc998e3bc36a0092149f9201767da592849777f1c",
|
||||
"61594d714aa94fe551f604123578c4a6592145f4228ad8601224b1b89ce401b0",
|
||||
"416ca193aa5448b8cca1f09642807765cc0ee299609f972df0614cfb8ea2f2b1",
|
||||
"9b6d95b76a01191a4c778185681ed7f3bced2fffa8e41516ec78240b213285f5",
|
||||
"ee0304bae0d4679bb34347ce3b1b80482262b9812bd0c0d5e19a5e2445043b75",
|
||||
"8027a1877f39e603dafc63279e004b4ed9df861d18ce81d9c43c7d7135da8f65",
|
||||
"42b409ff9b261a985227b1ab92707e706777ac14de24654d7e05f0501b37e003",
|
||||
"99097983b74c70800b182abc6f64046ab70407e9cabcd6cf570a38ada9ef75d5",
|
||||
"de8ca7a6b3f7314e91921d4dc5e915fb7bc2bd32129ea6966322effa48050c4c",
|
||||
"dcb302978215f54f33c3d2d7157ef69fd5058cf488fc43dd75c32b5dcaf47e7a",
|
||||
"7c765d407d3a9d5ea117cb8b8699628560787fc084a0c76afaa449bfbd121d84",
|
||||
"06639a386c9c1014217622ccbcf40908c4f1a0c33e23f8d6d68f4abf655f8f71",
|
||||
"59f1b5faf29904fe94a6a042e2d82d80d68fc16ad7651eba90a8de39f63f8fe8",
|
||||
"174398550d1468a41b98a09f496c38d3694feadef0f0073fd557610384bafb10",
|
||||
"00d52016bd7e4aae8bf8eaa23f42276b649fe557483b5d7684702633dd0fd944",
|
||||
"9a39bf837c868d61ed8cce6a4c7a0eb96f5e5bcc082ad6afdd5496cb614a23fb",
|
||||
"74ffc51cc30150cf79b6cb316d3a15cf332ab29a38fec9eb484ab1551d6d1856",
|
||||
"97c70a44366a6535c145b333f973ea86dfdc2d7a99da618c40c64705ad98e322",
|
||||
"6c6e3e05e1c9d2aae0ed2431544aea411771dd9d81017539af0fd818b2389f28",
|
||||
"23a2cf63ec81e65561acafc655898d2fd0ef190084653fa452413f75e5a3d5bc",
|
||||
"f1f9b0996d4ff1bf75e79e4cc8577c89eb633e68415c7faf74cf17a07bf80bd8",
|
||||
"e3aefda887252a72cee3578d33b2dcd90e9fe53b8bed6347ef5e26f74211adbb",
|
||||
"6b4ec98f02e647e01440b473bbd92a7fae21e01b6aa6c65e32db94a36092272e",
|
||||
"623ed218de81311783656783d6ce690b521a89c4dc09f28962e5bfd4fa549249",
|
||||
"9ce71f1506ccf4b99f234af49bd6202be883a80f95a155c6e9a1c36fd7e780c7",
|
||||
"139fcc6bb304b2879974c59cda61d86d7816ad4ac0f38ee7a724df488060e65d",
|
||||
"0e8c41eb946657188ea6f2aac36c25e393fff4d4149a83679220d66595ff0faa",
|
||||
"59ffbe1fc829decf90655438bd2df3a7b746ef4a04634d4ee9e280bb6ce5f14e",
|
||||
"e4f695f05bb05b231255ccce3d471b8d79c64a65bccc014662d27f0f7e921092",
|
||||
"39cc53c9e3f7d4980b21bea5ebc8a5b9cdf7fa6539430b5a826e8ad527168656",
|
||||
"685fb49563864326e78df461468795b7e47849a27e713281cd8bb75c0547936d",
|
||||
"05e4649832dfb8d1bfa81ea7cbf1c92c4f1cd5052bfc8d5465ba744aa6fa5eb8",
|
||||
"e5cece49ae3fc2c81f50c8e7a93a5fb1e1585380c467e4822234b64a94add617",
|
||||
"dda028cd1b806b4d494cc7f2789b6c2bd7e3c28ff6a267d03acc5ac6e69a05e0",
|
||||
"046c436b2a525059867b40c81e469b6d83001442fc65312c87a7ce7abeb022ff",
|
||||
"676ffea2ec31426a906d7795d7ebae2ba5e61f0b9fa815995b4a299dd085d510",
|
||||
"15b5cf6cdf4fd1c02f28bcce0f197cafae4c8c7c66a3e2e23af9fe610875315e",
|
||||
"c0fb5367cfcb803c5383f98e26524bed9176e6211588f53ec63fe6079cbfd3df",
|
||||
"7ab00f596b0286b77f78af567ee1be2536feee41daee67bd872f1480b7aa65b9",
|
||||
"e8d66519e43b1214ac68f9f2bdbc4386d41ac66b20c5a260b9b04102784074e9",
|
||||
"e6618db6961dc7b91478e0fa78c4c1b6699009981526693bd5e273972550860c",
|
||||
"b1e1185884a6d14bbfce3899cb53e8183adde642f264d0ff4f1745371e06134c",
|
||||
"cae5b7ea348afefc4c102bb7b125c4928f114739a27b877c6bcfbe5a79280384",
|
||||
"ecbe372132a9323b813eeb48f8dfcedaeca00e2887af181b063c6cfa13ed8ea1",
|
||||
"52387c6b99cc42aac51916b08b7b51d2baddfc19f2ba08d82a48432849dbdfb2",
|
||||
"3c39a7b53dec9ac85acf08b267637a9841e6df7b7b0f5e2ac56a8cf107de37da",
|
||||
"f8e6c64342f1e052480630e27e1016dce35fc3a614e60434fef4aa2503328ca9",
|
||||
"fd0266485777bd73e97c7c37f520c83c82e362fe4c25a6be93f3380083d4646b",
|
||||
"433e80c14ff7b8e16e179ccec35f55833df7dd5a5a063d23117b4b01b6f97170",
|
||||
"b7c6f6915cfa9a62fff6a1f02604de88c23c6c6c6d1b8f62c7cc10749f307e81",
|
||||
"ddf03aca85ade039e6742d5bef3df352df199d0d31e22b9858e7eda85cb3bbbe",
|
||||
"d36e8083fa7b36daee646cb8b3f99feaa3d89e5a396508741f003e21ac0b6bec",
|
||||
"ec79b568bdea63ca6091f5b84b0c639c10a0919e175fa09a4de3154f82906f25",
|
||||
"8cd2d0f8310f7009e94f50231870756cb39ba68f37506044910e2f71482b1788",
|
||||
"0eef96197f5c6be3859b6817e6a5736685856c416e29a2925bd5a15b2a57c8b1",
|
||||
"04918dfc36c93e7db6cc0d60f37e1522f1c36b64d3f4b424c532d7c595febbc5",
|
||||
"c8383d81dd24406745b68409be40d6721c301029464067fcc50a25ddf9139549",
|
||||
"3b3a42d34cf0a1402d18d536c9d2ac2eb1c6019a9153be57084c8165d192e325",
|
||||
"da18e9860040f3bf493876fc16b1a912ae5a6f6fa8d5159c3de2b8233a0d9851",
|
||||
"e3fc673fc5f99cc554d0ff47756795647d25cb6e6658f912d114ae6429d35d35",
|
||||
"3aa5817273c3b2f94f491840e0472f049d0f10009e23de63006166bca9b36ea3",
|
||||
"bbb5dda0e15567979f0543407bdc2033d6f0bbb30f72512a981cfdb2f09e2747",
|
||||
"1096f6be0a4d7f0ecc2df4ed2c8683f143efc81eeba3ece6daadd2fca74c7ecc",
|
||||
"d76726da1b64e8679d8b6e66facf551ba96f2612de5a171fac818ee85ce3e5fe",
|
||||
"27487c9600b16b24a1bfb0519cfe4a5d1ad84959e3cce5d6d7a99d48660a1f78",
|
||||
"5d3ab876c206a37ad3b094e20bfc3941df3fa21a15ac8ea76d6918473789669a",
|
||||
"6b1b8dac34ffc61d464dfeef00e4a84a604e172ef6391fb629293d6f1666148c",
|
||||
"6fb266012c3008303e54ae55140b46957e9978098401dda34f4d921a275bf8bb",
|
||||
"53a91e3a64d1f658e983ac1e4f9e0c697f8f33e01d8debe439f4c1a92113f592",
|
||||
"5082984480f3b27891840a2037512739149678efc2ac981ca8cd016d02304efd",
|
||||
"7b849efa5604b58d50c419637b9873847dbf957081d526136c3a49b7357cd617",
|
||||
"f53b9d91a8cd177fb4a1cf081a1b6d58759a381ef120a7c5a18c0e70cae80983",
|
||||
"cfd7df62799a22e384a4ab5da8c4026c875b119d0f47c2716b20cdac9cc1f1a6",
|
||||
"d83b5ef189df7e884627294b752969547814c3cfe38995cf207c040e03bbe7a4",
|
||||
"96f652249b0946e1575d78a8bc7450123c8e64f1c56f6b2f93bc23fb249ed85a",
|
||||
"d60bdad03468f5f8c85b1b10db977e310a5aafab33750dfadb37488b02bfc8d7",
|
||||
"9839f160d893daae661c84168e07f46f0e1e9746feb8439a6d76738b4ad32eaa",
|
||||
"453a656903a031395d450f318211a6ec54cd79049a851f92cd6702c65ff5f5bd",
|
||||
"1634b87b5fcfd4a6c4ff2f2de17450ccce46f9abe0b02a71876c596ec165bfed",
|
||||
"24480686b56234a240fd9827209b584847f3d4f9657f0d9a97aec5320a264acb",
|
||||
"f4d1866e8599563c52ceeedf11c28b8567e465c6e9a91df92add535d57f02ab0",
|
||||
"805b34f708837dfb3e7f05815ac5760564628b58d5a0ce839ccbb6ef3620fac3",
|
||||
"659a74f6cfbc7c252c58d93452b9d9575e36c464aa6544c6375227c9166a6ed9",
|
||||
"75d737c3472471029c44876b330d2284288a42779b591a2ed4daa1c6c07efaf7",
|
||||
"dac1d8c5a9fe94f224e095b52577c33c2cc2b8f3a2d6ad9cbd46845af8c987f0",
|
||||
"be7358c4fe50148cccafc02ea205d80145e253889aa3958daafa8637047c840e",
|
||||
"30e8cbf1427c137fa60674a639431c19a9d6f4c07fd2959df83158e674fccbaa",
|
||||
"7f573f55d875ce8edc528edf822949fd2ab9f9c65d914a40225663b0a697be07",
|
||||
"781a1527055f74c1f70230f10384609b34548f8ab6a0a6caa74025827f9fdae5",
|
||||
"d82a91e1013170b10ca7fa0ec800fd0dc6e9335b70c303dadba603fc36802b5f",
|
||||
"a4237e420cdb0b3231d171fe879bcae37a2db7abf2f12a337b975337618c3ac2",
|
||||
"7ff4d89f90845ac4d7a50a259163798e0f446e61d4c943cc89637beff567ad02",
|
||||
"48dbb5e717a6221d64fd13ba12794bc28e5067ac1d7632ee9437d533772750df",
|
||||
"efba340bd479176486e5a2281c97ac4a90fdcf86ec9c13a78c3182ab877cd19b",
|
||||
"1021c8921548fa89abb4cc7e8668a3a8dcebae0a4c323ffeaf570438832d6993",
|
||||
"c67cd3e1a83daa56cff16f635db2fdb9ed9619300298d4701a58e68e84098345",
|
||||
"4ad6fa2d16e2a9b576c863b4cf7404a70d4dc320c0c447d10ad6ff58993eacc8",
|
||||
"e568a76a4f8836be296d405eb41034260d55e2361e4b2ef88350a4003bbd5f9b",
|
||||
"ebdee92945ef05283be0ac3de25787c81a6a58a10f568f9c6b61d9dd513adbad",
|
||||
"6e8f3edfa28bfc8057d735794f76b697bcf18fb894a5a37a132693ebda31a464",
|
||||
"576d23dc3db2056d208849462fee358cf9f0f3310a2c63cb6c267a4b9f5848f9",
|
||||
"18905d0a5d623ab81a98ba98c582bd5f57f2506c6b808905fc599d5a0b229b08",
|
||||
"a9046cc9175dc5a45fb93a2c890f9a8b18c707fa6d695771aab9300081d3e21a",
|
||||
"7a69e5f62fcc20e81beea7461a945e6531f8c7944200d0b3cb4cc63556c44106",
|
||||
"fd0266485777bd73e97c7c37f520c83c82e362fe4c25a6be93f3380083d4646b",
|
||||
"4b29db7a76f3b4fbc0a4fffc092e41c14f1a1a975a462d87e82827af03719cb2",
|
||||
"df1a6cb6c95a5bdd2a69e4fa921061da950fc0bb0b3529d04ca75e0c11f871df",
|
||||
"08bfc00b7f72e015f45c326f486bec16e4d5236b70e44543f1c5e86a8e21c76a",
|
||||
"1e908fbc1d131c17a87f32069f53f64f45c75f91a2f6d43f8aa6410974da5562",
|
||||
"b3a737d014a7e75f44b0f5afbd752f9bcc2abe54f60dbbebc3681b6e16611967",
|
||||
"d3052ca3e3d523b1ec80671eb1bba0517a2f522e195778dc83dd03a8d84a170e",
|
||||
"b98ded4ceaea20790dbcb3c31400692009d34c7f9927c286835a99b7481a5c22",
|
||||
"9e1e498420bc7c35f3e3a78d20045b4c8343986dae48739759bccb2a27e88c53",
|
||||
"141d2053cb29535ad45aa9e865cdec492524f0ec0066496b98b7099daab5d658",
|
||||
"8722c3843c85ddd6162a5cb506e1cb4d6ab0cafb966034f426e55a2ef89a345e",
|
||||
"52dfd21724329920c5c95f5361464e468584136d30030eb29247a7fe6c2c6e36",
|
||||
"d82a91e1013170b10ca7fa0ec800fd0dc6e9335b70c303dadba603fc36802b5f",
|
||||
"6bbb7d71eaa2544215a877e136cd7f490f4625eb56459a0da856cc8296d5df30",
|
||||
"1ebb28301aa1a48248d3723a0ea434bb7d4612ec920fa749583e3f41ce25849f",
|
||||
"00000001505e7e48927046e9bbaa728b1f3b511227e2200c578d6e6bb0c77eb9",
|
||||
"a01b5ba26421374250442e0d23f96e6a4bce429e0175cd0769ad8c585dd5a892",
|
||||
"26d6a946675e603f8de4bf6f9cef442037b70c7eee170ff06ed7673fc34c98f1",
|
||||
"1dd7992ea0ecbda7480ceed35748c3655691133c8c78af947fd24299db8f481f",
|
||||
"cdee943cbb19c51ab847a66d5d774373aa9f63d287246bb59b0827fa5e637400",
|
||||
"3d03c53608415b1d718c7786ee10bdb4e67bced32207e32880ee9e44301a19ec",
|
||||
"170dc4045d6c06275b40bd39f68ca31dbb962094e9763ee460f8341bd40bebca",
|
||||
"db1abbff170320730e5ace672ad7217161b8935afc1a896ae2fecf903c159932",
|
||||
"b0ac2c26eabdb0e0a9b0d10fd1458ca73c575b19d65e13f0e7484cbee84038b3",
|
||||
"c1e7fc21b4f9c199e6086e095639f0f16a4e4884544547ce8a653ed7b5b6c4a7",
|
||||
"813c2662366a12f6337b951c048552fd3c4894e403cab701634dcd803786dc09",
|
||||
"fd0bcf8cd1aee83fe75e6c0fdfc543845e5bc3f50d26d2d5e5c6d3fa521f98c0",
|
||||
"45fd1955f590da87c1fd2edb99d17accf235ec3ebf0afe1d3306ade42693c6e9",
|
||||
"27938497226683c701e2843c6db580e2f0e25f5a198f4c3397e3a0a27764215d",
|
||||
"2321edfd415f9558605b4d7a7083c52624e8922ae86bb2ae359fbf829724111a",
|
||||
"0461fcbecc4c3374439932d6b8f11269ccdb7cc973ad7a50ae362db135a474dd",
|
||||
"1f8e182bf72d61cb78dcd6b96dd3be8b874b8881da6630757b6508970f67230c",
|
||||
"c6603b0f1ccfec625d9c08b753e4f774eaf7d1cf2769223125b5fd4da728019e",
|
||||
"296842eaaed9be5ae0668da09fe48aac0521c4af859ad547d93145e5ac34c17e",
|
||||
"88f8707a45e825a13ed383332abe6e2f104ab44d877918be22550083a2b59e60",
|
||||
"27a20b41a66b35d442302a50ca1baad72c2ed844c8d1224c9f6d50a12752084e",
|
||||
"280e847ef0c82a2a7c4e877c91cd7567474c1431b815d27bbc1017e147d9d77c",
|
||||
"ad9d42203fd2480ea2e5c4c64593a027708aebe2b02aa60bd7b1d666daa5b08d",
|
||||
"bb0174ae21a6cac1a0a9c8b4ac6ebfda56ce51605c315b1824970bc275f7239a",
|
||||
"edb470271297ac5a61f277f3cd14de54c67eb5ccd20ef0d9df29be18685bb004",
|
||||
"9609b093450dd7e0afb389619acdaf2e6a0d6817c93552f3911e05b50ae73e3d",
|
||||
"3e33fd7124f174fc535151937f8718634dd9d856143d4cefb5a10ddaf2f615c0",
|
||||
"463555bb4b0f80fd1376fae628fabfaf7e5e31cd2741d80aa4d225c926bc287e",
|
||||
"916cb5ff07d3b51cef7f6b6b7f5479b1001b401c0e82558ee1a22504c7d507c9",
|
||||
"cc5f259f036683798e4a52071dbb97238702ffb6f0c85af6d273c8ddbe5c0afb",
|
||||
"2ad91f1dca2dcd5fc89e7208d1e5059f0bac0870d63fc3bac21c7a9388fa18fd",
|
||||
"8bf629b3d519a0f8a8390137a445c0eb2f5f2b4a8ed71151de898051e8006f13",
|
||||
"94215f42a96335c87fcb9e881a0bbb62b9a795519e109cf5f9d2ef617681f622",
|
||||
"bcbf9644d3f475d00eb9c6e467385ce16d4546c1a24222ccfa542bf776eaba95",
|
||||
"ba5115c37b0f911e530ed6c487ccbd9b737da33fd4b88a9f590860378c06af62",
|
||||
"609f186ca023d658c0fe019570472f59565c8be1dc163b1541fac9d90aa4e8af",
|
||||
"4e5622b575cdbb4d5ded093e48c68cd3f724fad547142f0c1b0aec2f2b2a0b2e",
|
||||
"4df7b43b3a4db4b99e3dbad6bd0f84226726efd63ae7e027f91acbd91b4dba48",
|
||||
"bcbf9644d3f475d00eb9c6e467385ce16d4546c1a24222ccfa542bf776eaba95",
|
||||
"3a06add309fd8419ea4d4e475e9c0dff5909c635d9769bf0728232f3a0683a84",
|
||||
"d2384c4ac623eb9f15ae4cb32ee7a9b03e0202802d52f2395f2ee8f6433151aa",
|
||||
"1d4cc828b657da8c7a101e8657365459b9dc74139bed5d35bd8295b00be2a1ae",
|
||||
"76fcec0e0638351f1d0e0dc4ebaf6dd3d67404126d664547674070f3175273d9",
|
||||
"6707c39e6c53ef945c5df29af78667dc941ed80094994bd264fd806a6e0a3230",
|
||||
"80482e60178c2ce996da6d67577f56a2b2c47ccb1c84c81f2b7960637cb71b78",
|
||||
"147784df719c09fad62bff0493a60b4f5dbbe8579e73f00d207350e8ffdfd65f",
|
||||
"afc0295d2c6e0a1820c214c07312070a4070d52083163d7fe410fa02bf85d9d2",
|
||||
"6a5e3cc17279cbdf051c06d96e3f843cdb296f351d8ca35a6a190c0ab90dbf9a",
|
||||
"3b7fc823611f1aeaea63ee3bf69b25b8aa16ec6e81d1afc39026808fe194354f",
|
||||
"d96fe9c5478d1bb36e9ec40cc678b0bf7ff67e017922a151f925640a8884f291",
|
||||
"1d80e5588de010d137a67c42b03717595f5f510e73e42cfc48f31bae91844d59",
|
||||
"06dde95f0268ce40128bf73ca6e85567b8567688ea52f24dcd5734e77c50f2d9",
|
||||
"f683e87035f7ad4f44e0b98cfbd9537e16455a92cd38cefc4cb31db7557f5ef2",
|
||||
"036533caa872376946d4e4fdea4c1a0441eda38ca2d9d9417bb36006cbaabf58",
|
||||
"7cc328a08ddb2afdf9f9be77beff4c83489ff979721827d628a542f32a247c0e",
|
||||
"f240be2b684f85cc81566f2081386af81d7427ea86250c8bde6b7a8500c761ba",
|
||||
"fb61b93d864e4f0eba766bb8556f2dc0262e8e985012e29ba28508dd52067d98",
|
||||
"0e52122d1eb95cdd8ba5f65815f7d1c9125a8c14d82989eae52ab369eea6c7e4",
|
||||
"04dcaf2552801937d1c20b69adf89646f21b53c17906271d22c7be9bcadb96c0",
|
||||
"0ee827a36e8bb0cfc483cf1872781182c4a16c58acba3ae2d7b155e0370e93b8",
|
||||
"adc14fa3ad590856dd8b80815d367f7c1e6735ad00fd98a86d002fbe9fb535e1",
|
||||
"2bc9be7569515701581d5d765422a17ee3500d8e1f4e7aa53f6be86ae6ba274d",
|
||||
"9a21569255d0a3a9e75f1de2e4c883c9be2e5615887f22b2ecf6b1813bcd587d",
|
||||
"3f3ff7adb39159c42c0aa16d53c0483bfbfad57df22a9e9e9364a306741eb2cf",
|
||||
"e9aa50decff01f2cec1ec2b2e0b34332cf9c92cafdac5a7cc0881a6d26b59854",
|
||||
"c7eda660a6bc8270530e82b4a7712acdea2e31dc0a56f8dc955ac009efd97c86",
|
||||
"787338757fc25d65cd929394d5e7713cf43638e8d259e8dcf5c73b834eb851f2",
|
||||
"7e8ffe414a53ce18a85536eda74d3cbda0da6a98d4fe6c514948002472178c95",
|
||||
"6a04ab98d9e4774ad806e302dddeb63bea16b5cb5f223ee77478e861bb583eb3",
|
||||
"0c371f5ed95076613443e8331c4b60828ed67bcdefaa1698fb5ce9d7b3285ffb",
|
||||
"daa41bedb68591363bf4407f687cb9789cc543ed024bb77c22d2c84d88f54153",
|
||||
"12ee03d11684a125dd87be879c28190415be3f3b1eca6b4ed743bd74ffd880e6",
|
||||
"00dfb20815a71e24572394efdfbf772e56a507921b8287201ab8937496ee8e6d",
|
||||
"94a6a78a5aebbba40bd1aaa2234810132c2d8004bb9177616c413d3c0ddf320e"
|
||||
];
|
||||
const PUBKEY_ARRAYS = generateTestPubkeyArrays();
|
||||
|
||||
// Main testing function
|
||||
async function testRelayPubkeyLimits() {
|
||||
console.log('Starting Nostr Relay Pubkey Filter Test');
|
||||
console.log(`Testing ${RELAYS.length} relays with up to ${MAX_PUBKEYS} pubkeys`);
|
||||
console.log(`Incrementing by ${STEP_SIZE} pubkeys per test, requesting ${EVENT_LIMIT} events per test\n`);
|
||||
|
||||
const results = {};
|
||||
|
||||
// Initialize results for each relay
|
||||
for (const relayUrl of RELAYS) {
|
||||
results[relayUrl] = {
|
||||
maxPubkeys: 0,
|
||||
failures: 0,
|
||||
lastSuccess: 0
|
||||
};
|
||||
}
|
||||
|
||||
// Test each pubkey array size
|
||||
for (const pubkeyArray of PUBKEY_ARRAYS) {
|
||||
const pubkeyCount = pubkeyArray.length;
|
||||
console.log(`\nTesting with ${pubkeyCount} pubkeys...`);
|
||||
|
||||
// Test each relay with this pubkey count
|
||||
for (const relayUrl of RELAYS) {
|
||||
if (results[relayUrl].failures >= 2) {
|
||||
console.log(` ${relayUrl}: Skipping (already failed 2 times)`);
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
const success = await testRelayWithPubkeys(relayUrl, pubkeyArray);
|
||||
if (success) {
|
||||
results[relayUrl].maxPubkeys = pubkeyCount;
|
||||
results[relayUrl].lastSuccess = pubkeyCount;
|
||||
results[relayUrl].failures = 0; // Reset failures on success
|
||||
console.log(` ${relayUrl}: ✓ Success`);
|
||||
} else {
|
||||
results[relayUrl].failures++;
|
||||
console.log(` ${relayUrl}: ✗ Failed (${results[relayUrl].failures}/2)`);
|
||||
}
|
||||
} catch (error) {
|
||||
results[relayUrl].failures++;
|
||||
console.log(` ${relayUrl}: ✗ Error (${results[relayUrl].failures}/2): ${error.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Print final results
|
||||
console.log('\n=== FINAL RESULTS ===');
|
||||
for (const relayUrl of RELAYS) {
|
||||
const result = results[relayUrl];
|
||||
console.log(`${relayUrl}: ${result.maxPubkeys} pubkeys (last success: ${result.lastSuccess})`);
|
||||
}
|
||||
}
|
||||
|
||||
// Test a single relay with a specific pubkey array
|
||||
async function testRelayWithPubkeys(relayUrl, pubkeys) {
|
||||
return new Promise((resolve) => {
|
||||
const ws = new WebSocket(relayUrl);
|
||||
let receivedEOSE = false;
|
||||
let subscriptionId = 'test_' + Math.random().toString(36).substr(2, 9);
|
||||
let timeoutId;
|
||||
|
||||
const cleanup = () => {
|
||||
clearTimeout(timeoutId);
|
||||
ws.close();
|
||||
};
|
||||
|
||||
ws.on('open', () => {
|
||||
console.log(`Connected to ${relayUrl}`);
|
||||
// Send subscription request
|
||||
const filter = {
|
||||
kinds: [EVENT_KIND],
|
||||
authors: pubkeys,
|
||||
limit: EVENT_LIMIT
|
||||
};
|
||||
const subscriptionMessage = ['REQ', subscriptionId, filter];
|
||||
const messageString = JSON.stringify(subscriptionMessage);
|
||||
const messageSize = Buffer.byteLength(messageString, 'utf8');
|
||||
console.log(`Sending request with ${pubkeys.length} pubkeys (${messageSize} bytes)`);
|
||||
ws.send(messageString);
|
||||
|
||||
// Set timeout for EOSE response
|
||||
timeoutId = setTimeout(() => {
|
||||
if (!receivedEOSE) {
|
||||
console.log('Timeout waiting for EOSE');
|
||||
cleanup();
|
||||
resolve(false);
|
||||
}
|
||||
}, 10000); // 10 second timeout
|
||||
});
|
||||
|
||||
ws.on('message', (data) => {
|
||||
try {
|
||||
const message = JSON.parse(data.toString());
|
||||
if (message[0] === 'EVENT' && message[1] === subscriptionId) {
|
||||
// Skip event content, just log that we received an event
|
||||
console.log(`Received EVENT for subscription ${subscriptionId}`);
|
||||
} else if (message[0] === 'EOSE' && message[1] === subscriptionId) {
|
||||
console.log(`Received EOSE: ${JSON.stringify(message)}`);
|
||||
receivedEOSE = true;
|
||||
cleanup();
|
||||
resolve(true);
|
||||
} else {
|
||||
console.log(`Received other message: ${JSON.stringify(message)}`);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log(`Received non-JSON: ${data.toString()}`);
|
||||
}
|
||||
});
|
||||
|
||||
ws.on('error', (error) => {
|
||||
console.log(`WebSocket error: ${error.message}`);
|
||||
cleanup();
|
||||
resolve(false);
|
||||
});
|
||||
|
||||
// Overall connection timeout
|
||||
setTimeout(() => {
|
||||
if (!receivedEOSE) {
|
||||
cleanup();
|
||||
resolve(false);
|
||||
}
|
||||
}, 15000); // 15 second total timeout
|
||||
});
|
||||
}
|
||||
|
||||
// Run the test
|
||||
testRelayPubkeyLimits().catch(console.error);
|
||||
@@ -1,18 +0,0 @@
|
||||
2025-10-11 13:46:17 - ==========================================
|
||||
2025-10-11 13:46:17 - C-Relay Comprehensive Test Suite Runner
|
||||
2025-10-11 13:46:17 - ==========================================
|
||||
2025-10-11 13:46:17 - Relay URL: ws://127.0.0.1:8888
|
||||
2025-10-11 13:46:17 - Log file: test_results_20251011_134617.log
|
||||
2025-10-11 13:46:17 - Report file: test_report_20251011_134617.html
|
||||
2025-10-11 13:46:17 -
|
||||
2025-10-11 13:46:17 - Checking relay status at ws://127.0.0.1:8888...
|
||||
2025-10-11 13:46:17 - \033[0;32m✓ Relay HTTP endpoint is accessible\033[0m
|
||||
2025-10-11 13:46:17 -
|
||||
2025-10-11 13:46:17 - Starting comprehensive test execution...
|
||||
2025-10-11 13:46:17 -
|
||||
2025-10-11 13:46:17 - \033[0;34m=== SECURITY TEST SUITES ===\033[0m
|
||||
2025-10-11 13:46:17 - ==========================================
|
||||
2025-10-11 13:46:17 - Running Test Suite: SQL Injection Tests
|
||||
2025-10-11 13:46:17 - Description: Comprehensive SQL injection vulnerability testing
|
||||
2025-10-11 13:46:17 - ==========================================
|
||||
2025-10-11 13:46:17 - \033[0;31mERROR: Test script tests/sql_injection_tests.sh not found\033[0m
|
||||
@@ -1,629 +0,0 @@
|
||||
2025-10-11 13:48:07 - ==========================================
|
||||
2025-10-11 13:48:07 - C-Relay Comprehensive Test Suite Runner
|
||||
2025-10-11 13:48:07 - ==========================================
|
||||
2025-10-11 13:48:07 - Relay URL: ws://127.0.0.1:8888
|
||||
2025-10-11 13:48:07 - Log file: test_results_20251011_134807.log
|
||||
2025-10-11 13:48:07 - Report file: test_report_20251011_134807.html
|
||||
2025-10-11 13:48:07 -
|
||||
2025-10-11 13:48:07 - Checking relay status at ws://127.0.0.1:8888...
|
||||
2025-10-11 13:48:07 - \033[0;32m✓ Relay HTTP endpoint is accessible\033[0m
|
||||
2025-10-11 13:48:07 -
|
||||
2025-10-11 13:48:07 - Starting comprehensive test execution...
|
||||
2025-10-11 13:48:07 -
|
||||
2025-10-11 13:48:07 - \033[0;34m=== SECURITY TEST SUITES ===\033[0m
|
||||
2025-10-11 13:48:07 - ==========================================
|
||||
2025-10-11 13:48:07 - Running Test Suite: SQL Injection Tests
|
||||
2025-10-11 13:48:07 - Description: Comprehensive SQL injection vulnerability testing
|
||||
2025-10-11 13:48:07 - ==========================================
|
||||
==========================================
|
||||
C-Relay SQL Injection Test Suite
|
||||
==========================================
|
||||
Testing against relay at ws://127.0.0.1:8888
|
||||
|
||||
=== Basic Connectivity Test ===
|
||||
Testing Basic connectivity... [0;32mPASSED[0m - Valid query works
|
||||
|
||||
=== Authors Filter SQL Injection Tests ===
|
||||
Testing Authors filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
|
||||
=== IDs Filter SQL Injection Tests ===
|
||||
Testing IDs filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
|
||||
=== Kinds Filter SQL Injection Tests ===
|
||||
Testing Kinds filter with string injection... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Kinds filter with negative value... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Kinds filter with very large value... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
|
||||
=== Search Filter SQL Injection Tests ===
|
||||
Testing Search filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing Search filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing Search filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing Search filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing Search filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
|
||||
=== Tag Filter SQL Injection Tests ===
|
||||
Testing #e tag filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
|
||||
=== Timestamp Filter SQL Injection Tests ===
|
||||
Testing Since parameter injection... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Until parameter injection... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
|
||||
=== Limit Parameter SQL Injection Tests ===
|
||||
Testing Limit parameter injection... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Limit with UNION... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
|
||||
=== Complex Multi-Filter SQL Injection Tests ===
|
||||
Testing Multi-filter with authors injection... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Multi-filter with search injection... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Multi-filter with tag injection... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
|
||||
=== COUNT Message SQL Injection Tests ===
|
||||
Testing COUNT with authors payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing COUNT with authors payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: /*... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: /*... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: */... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: */... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: /**/... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: /**/... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: #... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: #... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing COUNT with authors payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing COUNT with authors payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing COUNT with authors payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
|
||||
=== Edge Case SQL Injection Tests ===
|
||||
Testing Empty string injection... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Null byte injection... [0;32mPASSED[0m - SQL injection blocked (silently rejected)
|
||||
Testing Unicode injection... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Very long injection payload... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
|
||||
=== Subscription ID SQL Injection Tests ===
|
||||
Testing Subscription ID injection... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Subscription ID with quotes... [0;32mPASSED[0m - SQL injection blocked (silently rejected)
|
||||
|
||||
=== CLOSE Message SQL Injection Tests ===
|
||||
Testing CLOSE with injection... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
|
||||
=== Test Results ===
|
||||
Total tests: 318
|
||||
Passed: [0;32m318[0m
|
||||
Failed: [0;31m0[0m
|
||||
[0;32m✓ All SQL injection tests passed![0m
|
||||
The relay appears to be protected against SQL injection attacks.
|
||||
2025-10-11 13:48:30 - \033[0;32m✓ SQL Injection Tests PASSED\033[0m (Duration: 23s)
|
||||
2025-10-11 13:48:30 - ==========================================
|
||||
2025-10-11 13:48:30 - Running Test Suite: Filter Validation Tests
|
||||
2025-10-11 13:48:30 - Description: Input validation for REQ and COUNT messages
|
||||
2025-10-11 13:48:30 - ==========================================
|
||||
=== C-Relay Filter Validation Tests ===
|
||||
Testing against relay at ws://127.0.0.1:8888
|
||||
|
||||
Testing Valid REQ message... [0;32mPASSED[0m
|
||||
Testing Valid COUNT message... [0;32mPASSED[0m
|
||||
|
||||
=== Testing Filter Array Validation ===
|
||||
Testing Non-object filter... [0;32mPASSED[0m
|
||||
Testing Too many filters... [0;32mPASSED[0m
|
||||
|
||||
=== Testing Authors Validation ===
|
||||
Testing Invalid author type... [0;32mPASSED[0m
|
||||
Testing Invalid author hex... [0;32mPASSED[0m
|
||||
Testing Too many authors... [0;32mPASSED[0m
|
||||
|
||||
=== Testing IDs Validation ===
|
||||
Testing Invalid ID type... [0;32mPASSED[0m
|
||||
Testing Invalid ID hex... [0;32mPASSED[0m
|
||||
Testing Too many IDs... [0;32mPASSED[0m
|
||||
|
||||
=== Testing Kinds Validation ===
|
||||
Testing Invalid kind type... [0;32mPASSED[0m
|
||||
Testing Negative kind... [0;32mPASSED[0m
|
||||
Testing Too large kind... [0;32mPASSED[0m
|
||||
Testing Too many kinds... [0;32mPASSED[0m
|
||||
|
||||
=== Testing Timestamp Validation ===
|
||||
Testing Invalid since type... [0;32mPASSED[0m
|
||||
Testing Negative since... [0;32mPASSED[0m
|
||||
Testing Invalid until type... [0;32mPASSED[0m
|
||||
Testing Negative until... [0;32mPASSED[0m
|
||||
|
||||
=== Testing Limit Validation ===
|
||||
Testing Invalid limit type... [0;32mPASSED[0m
|
||||
Testing Negative limit... [0;32mPASSED[0m
|
||||
Testing Too large limit... [0;32mPASSED[0m
|
||||
|
||||
=== Testing Search Validation ===
|
||||
Testing Invalid search type... [0;32mPASSED[0m
|
||||
Testing Search too long... [0;32mPASSED[0m
|
||||
Testing Search SQL injection... [0;32mPASSED[0m
|
||||
|
||||
=== Testing Tag Filter Validation ===
|
||||
Testing Invalid tag filter type... [0;32mPASSED[0m
|
||||
Testing Too many tag values... [0;32mPASSED[0m
|
||||
Testing Tag value too long... [0;32mPASSED[0m
|
||||
|
||||
=== Testing Rate Limiting ===
|
||||
Testing rate limiting with malformed requests... [1;33mUNCERTAIN[0m - Rate limiting may not have triggered (this could be normal)
|
||||
|
||||
=== Test Results ===
|
||||
Total tests: 28
|
||||
Passed: [0;32m28[0m
|
||||
Failed: [0;31m0[0m
|
||||
[0;32mAll tests passed![0m
|
||||
2025-10-11 13:48:35 - \033[0;32m✓ Filter Validation Tests PASSED\033[0m (Duration: 5s)
|
||||
2025-10-11 13:48:35 - ==========================================
|
||||
2025-10-11 13:48:35 - Running Test Suite: Subscription Validation Tests
|
||||
2025-10-11 13:48:35 - Description: Subscription ID and message validation
|
||||
2025-10-11 13:48:35 - ==========================================
|
||||
Testing subscription ID validation fixes...
|
||||
Testing malformed subscription IDs...
|
||||
Valid ID test: Success
|
||||
Testing CLOSE message validation...
|
||||
CLOSE valid ID test: Success
|
||||
Subscription validation tests completed.
|
||||
2025-10-11 13:48:36 - \033[0;32m✓ Subscription Validation Tests PASSED\033[0m (Duration: 1s)
|
||||
2025-10-11 13:48:36 - ==========================================
|
||||
2025-10-11 13:48:36 - Running Test Suite: Memory Corruption Tests
|
||||
2025-10-11 13:48:36 - Description: Buffer overflow and memory safety testing
|
||||
2025-10-11 13:48:36 - ==========================================
|
||||
==========================================
|
||||
C-Relay Memory Corruption Test Suite
|
||||
==========================================
|
||||
Testing against relay at ws://127.0.0.1:8888
|
||||
Note: These tests may cause the relay to crash if vulnerabilities exist
|
||||
|
||||
=== Basic Connectivity Test ===
|
||||
Testing Basic connectivity... [0;32mPASSED[0m - No memory corruption detected
|
||||
|
||||
=== Subscription ID Memory Corruption Tests ===
|
||||
Testing Empty subscription ID... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
Testing Very long subscription ID (1KB)... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
Testing Very long subscription ID (10KB)... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
Testing Subscription ID with null bytes... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
Testing Subscription ID with special chars... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
Testing Unicode subscription ID... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
Testing Subscription ID with path traversal... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
|
||||
=== Filter Array Memory Corruption Tests ===
|
||||
Testing Too many filters (50)... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
|
||||
=== Concurrent Access Memory Tests ===
|
||||
Testing Concurrent subscription creation... ["EVENT", "concurrent_1760204917502714788", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
|
||||
["EVENT", "concurrent_1760204917502714788", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
|
||||
["EVENT", "concurrent_1760204917502714788", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
|
||||
["EVENT", "concurrent_1760204917502714788", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
|
||||
["EVENT", "concurrent_1760204917502714788", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
|
||||
["EVENT", "concurrent_1760204917502714788", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
|
||||
["EVENT", "concurrent_1760204917502714788", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
|
||||
["EVENT", "concurrent_1760204917502714788", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
|
||||
["EVENT", "concurrent_1760204917502714788", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
|
||||
["EVENT", "concurrent_1760204917502714788", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
|
||||
[0;32mPASSED[0m - Concurrent access handled safely
|
||||
Testing Concurrent CLOSE operations...
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
[0;32mPASSED[0m - Concurrent access handled safely
|
||||
|
||||
=== Malformed JSON Memory Tests ===
|
||||
Testing Unclosed JSON object... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
Testing Mismatched brackets... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
Testing Extra closing brackets... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
Testing Null bytes in JSON... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
|
||||
=== Large Message Memory Tests ===
|
||||
Testing Very large filter array... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
Testing Very long search term... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
|
||||
=== Test Results ===
|
||||
Total tests: 17
|
||||
Passed: [0;32m17[0m
|
||||
Failed: [0;31m0[0m
|
||||
[0;32m✓ All memory corruption tests passed![0m
|
||||
The relay appears to handle memory safely.
|
||||
2025-10-11 13:48:38 - \033[0;32m✓ Memory Corruption Tests PASSED\033[0m (Duration: 2s)
|
||||
2025-10-11 13:48:38 - ==========================================
|
||||
2025-10-11 13:48:38 - Running Test Suite: Input Validation Tests
|
||||
2025-10-11 13:48:38 - Description: Comprehensive input boundary testing
|
||||
2025-10-11 13:48:38 - ==========================================
|
||||
==========================================
|
||||
C-Relay Input Validation Test Suite
|
||||
==========================================
|
||||
Testing against relay at ws://127.0.0.1:8888
|
||||
|
||||
=== Basic Connectivity Test ===
|
||||
Testing Basic connectivity... [0;32mPASSED[0m - Input accepted correctly
|
||||
|
||||
=== Message Type Validation ===
|
||||
Testing Invalid message type - string... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Invalid message type - number... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Invalid message type - null... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Invalid message type - object... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Empty message type... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Very long message type... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
|
||||
=== Message Structure Validation ===
|
||||
Testing Too few arguments... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Too many arguments... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Non-array message... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Empty array... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Nested arrays incorrectly... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
|
||||
=== Subscription ID Boundary Tests ===
|
||||
Testing Valid subscription ID... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Empty subscription ID... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Subscription ID with spaces... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Subscription ID with newlines... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Subscription ID with tabs... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Subscription ID with control chars... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Unicode subscription ID... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Very long subscription ID... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
|
||||
=== Filter Object Validation ===
|
||||
Testing Valid empty filter... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Non-object filter... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Null filter... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Array filter... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Filter with invalid keys... [0;32mPASSED[0m - Input accepted correctly
|
||||
|
||||
=== Authors Field Validation ===
|
||||
Testing Valid authors array... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Empty authors array... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Non-array authors... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Invalid hex in authors... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Short pubkey in authors... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
|
||||
=== IDs Field Validation ===
|
||||
Testing Valid ids array... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Empty ids array... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Non-array ids... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
|
||||
=== Kinds Field Validation ===
|
||||
Testing Valid kinds array... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Empty kinds array... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Non-array kinds... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing String in kinds... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
|
||||
=== Timestamp Field Validation ===
|
||||
Testing Valid since timestamp... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Valid until timestamp... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing String since timestamp... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Negative timestamp... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
|
||||
=== Limit Field Validation ===
|
||||
Testing Valid limit... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Zero limit... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing String limit... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Negative limit... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
|
||||
=== Multiple Filters ===
|
||||
Testing Two valid filters... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Many filters... [0;32mPASSED[0m - Input accepted correctly
|
||||
|
||||
=== Test Results ===
|
||||
Total tests: 47
|
||||
Passed: 47
|
||||
Failed: 0
|
||||
[0;32m✓ All input validation tests passed![0m
|
||||
The relay properly validates input.
|
||||
2025-10-11 13:48:42 - \033[0;32m✓ Input Validation Tests PASSED\033[0m (Duration: 4s)
|
||||
2025-10-11 13:48:42 -
|
||||
2025-10-11 13:48:42 - \033[0;34m=== PERFORMANCE TEST SUITES ===\033[0m
|
||||
2025-10-11 13:48:42 - ==========================================
|
||||
2025-10-11 13:48:42 - Running Test Suite: Subscription Limit Tests
|
||||
2025-10-11 13:48:42 - Description: Subscription limit enforcement testing
|
||||
2025-10-11 13:48:42 - ==========================================
|
||||
=== Subscription Limit Test ===
|
||||
[INFO] Testing relay at: ws://127.0.0.1:8888
|
||||
[INFO] Note: This test assumes default subscription limits (max 25 per client)
|
||||
|
||||
=== Test 1: Basic Connectivity ===
|
||||
[INFO] Testing basic WebSocket connection...
|
||||
[PASS] Basic connectivity works
|
||||
|
||||
=== Test 2: Subscription Limit Enforcement ===
|
||||
[INFO] Testing subscription limits by creating multiple subscriptions...
|
||||
[INFO] Creating multiple subscriptions within a single connection...
|
||||
[INFO] Hit subscription limit at subscription 26
|
||||
[PASS] Subscription limit enforcement working (limit hit after 25 subscriptions)
|
||||
|
||||
=== Test Complete ===
|
||||
2025-10-11 13:48:42 - \033[0;32m✓ Subscription Limit Tests PASSED\033[0m (Duration: 0s)
|
||||
2025-10-11 13:48:42 - ==========================================
|
||||
2025-10-11 13:48:42 - Running Test Suite: Load Testing
|
||||
2025-10-11 13:48:42 - Description: High concurrent connection testing
|
||||
2025-10-11 13:48:42 - ==========================================
|
||||
==========================================
|
||||
C-Relay Load Testing Suite
|
||||
==========================================
|
||||
Testing against relay at ws://127.0.0.1:8888
|
||||
|
||||
=== Basic Connectivity Test ===
|
||||
[0;31m✗ Cannot connect to relay. Aborting tests.[0m
|
||||
2025-10-11 13:48:47 - \033[0;31m✗ Load Testing FAILED\033[0m (Duration: 5s)
|
||||
@@ -1,728 +0,0 @@
|
||||
2025-10-11 14:11:34 - ==========================================
|
||||
2025-10-11 14:11:34 - C-Relay Comprehensive Test Suite Runner
|
||||
2025-10-11 14:11:34 - ==========================================
|
||||
2025-10-11 14:11:34 - Relay URL: ws://127.0.0.1:8888
|
||||
2025-10-11 14:11:34 - Log file: test_results_20251011_141134.log
|
||||
2025-10-11 14:11:34 - Report file: test_report_20251011_141134.html
|
||||
2025-10-11 14:11:34 -
|
||||
2025-10-11 14:11:34 - Checking relay status at ws://127.0.0.1:8888...
|
||||
2025-10-11 14:11:34 - \033[0;32m✓ Relay HTTP endpoint is accessible\033[0m
|
||||
2025-10-11 14:11:34 -
|
||||
2025-10-11 14:11:34 - Starting comprehensive test execution...
|
||||
2025-10-11 14:11:34 -
|
||||
2025-10-11 14:11:34 - \033[0;34m=== SECURITY TEST SUITES ===\033[0m
|
||||
2025-10-11 14:11:34 - ==========================================
|
||||
2025-10-11 14:11:34 - Running Test Suite: SQL Injection Tests
|
||||
2025-10-11 14:11:34 - Description: Comprehensive SQL injection vulnerability testing
|
||||
2025-10-11 14:11:34 - ==========================================
|
||||
==========================================
|
||||
C-Relay SQL Injection Test Suite
|
||||
==========================================
|
||||
Testing against relay at ws://127.0.0.1:8888
|
||||
|
||||
=== Basic Connectivity Test ===
|
||||
Testing Basic connectivity... [0;32mPASSED[0m - Valid query works
|
||||
|
||||
=== Authors Filter SQL Injection Tests ===
|
||||
Testing Authors filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Authors filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
|
||||
=== IDs Filter SQL Injection Tests ===
|
||||
Testing IDs filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing IDs filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
|
||||
=== Kinds Filter SQL Injection Tests ===
|
||||
Testing Kinds filter with string injection... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Kinds filter with negative value... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Kinds filter with very large value... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
|
||||
=== Search Filter SQL Injection Tests ===
|
||||
Testing Search filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing Search filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing Search filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing Search filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing Search filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Search filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
|
||||
=== Tag Filter SQL Injection Tests ===
|
||||
Testing #e tag filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #e tag filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #p tag filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #t tag filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #r tag filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing #d tag filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
|
||||
=== Timestamp Filter SQL Injection Tests ===
|
||||
Testing Since parameter injection... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Until parameter injection... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
|
||||
=== Limit Parameter SQL Injection Tests ===
|
||||
Testing Limit parameter injection... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Limit with UNION... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
|
||||
=== Complex Multi-Filter SQL Injection Tests ===
|
||||
Testing Multi-filter with authors injection... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Multi-filter with search injection... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Multi-filter with tag injection... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
|
||||
=== COUNT Message SQL Injection Tests ===
|
||||
Testing COUNT with authors payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing COUNT with authors payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: /*... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: /*... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: */... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: */... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: /**/... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: /**/... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: #... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: #... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing COUNT with authors payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing COUNT with authors payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
||||
Testing COUNT with authors payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with authors payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing COUNT with search payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
|
||||
=== Edge Case SQL Injection Tests ===
|
||||
Testing Empty string injection... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Null byte injection... [0;32mPASSED[0m - SQL injection blocked (silently rejected)
|
||||
Testing Unicode injection... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Very long injection payload... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
|
||||
=== Subscription ID SQL Injection Tests ===
|
||||
Testing Subscription ID injection... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
Testing Subscription ID with quotes... [0;32mPASSED[0m - SQL injection blocked (silently rejected)
|
||||
|
||||
=== CLOSE Message SQL Injection Tests ===
|
||||
Testing CLOSE with injection... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
||||
|
||||
=== Test Results ===
|
||||
Total tests: 318
|
||||
Passed: [0;32m318[0m
|
||||
Failed: [0;31m0[0m
|
||||
[0;32m✓ All SQL injection tests passed![0m
|
||||
The relay appears to be protected against SQL injection attacks.
|
||||
2025-10-11 14:11:56 - \033[0;32m✓ SQL Injection Tests PASSED\033[0m (Duration: 22s)
|
||||
2025-10-11 14:11:56 - ==========================================
|
||||
2025-10-11 14:11:56 - Running Test Suite: Filter Validation Tests
|
||||
2025-10-11 14:11:56 - Description: Input validation for REQ and COUNT messages
|
||||
2025-10-11 14:11:56 - ==========================================
|
||||
=== C-Relay Filter Validation Tests ===
|
||||
Testing against relay at ws://127.0.0.1:8888
|
||||
|
||||
Testing Valid REQ message... [0;32mPASSED[0m
|
||||
Testing Valid COUNT message... [0;32mPASSED[0m
|
||||
|
||||
=== Testing Filter Array Validation ===
|
||||
Testing Non-object filter... [0;32mPASSED[0m
|
||||
Testing Too many filters... [0;32mPASSED[0m
|
||||
|
||||
=== Testing Authors Validation ===
|
||||
Testing Invalid author type... [0;32mPASSED[0m
|
||||
Testing Invalid author hex... [0;32mPASSED[0m
|
||||
Testing Too many authors... [0;32mPASSED[0m
|
||||
|
||||
=== Testing IDs Validation ===
|
||||
Testing Invalid ID type... [0;32mPASSED[0m
|
||||
Testing Invalid ID hex... [0;32mPASSED[0m
|
||||
Testing Too many IDs... [0;32mPASSED[0m
|
||||
|
||||
=== Testing Kinds Validation ===
|
||||
Testing Invalid kind type... [0;32mPASSED[0m
|
||||
Testing Negative kind... [0;32mPASSED[0m
|
||||
Testing Too large kind... [0;32mPASSED[0m
|
||||
Testing Too many kinds... [0;32mPASSED[0m
|
||||
|
||||
=== Testing Timestamp Validation ===
|
||||
Testing Invalid since type... [0;32mPASSED[0m
|
||||
Testing Negative since... [0;32mPASSED[0m
|
||||
Testing Invalid until type... [0;32mPASSED[0m
|
||||
Testing Negative until... [0;32mPASSED[0m
|
||||
|
||||
=== Testing Limit Validation ===
|
||||
Testing Invalid limit type... [0;32mPASSED[0m
|
||||
Testing Negative limit... [0;32mPASSED[0m
|
||||
Testing Too large limit... [0;32mPASSED[0m
|
||||
|
||||
=== Testing Search Validation ===
|
||||
Testing Invalid search type... [0;32mPASSED[0m
|
||||
Testing Search too long... [0;32mPASSED[0m
|
||||
Testing Search SQL injection... [0;32mPASSED[0m
|
||||
|
||||
=== Testing Tag Filter Validation ===
|
||||
Testing Invalid tag filter type... [0;32mPASSED[0m
|
||||
Testing Too many tag values... [0;32mPASSED[0m
|
||||
Testing Tag value too long... [0;32mPASSED[0m
|
||||
|
||||
=== Testing Rate Limiting ===
|
||||
Testing rate limiting with malformed requests... [1;33mUNCERTAIN[0m - Rate limiting may not have triggered (this could be normal)
|
||||
|
||||
=== Test Results ===
|
||||
Total tests: 28
|
||||
Passed: [0;32m28[0m
|
||||
Failed: [0;31m0[0m
|
||||
[0;32mAll tests passed![0m
|
||||
2025-10-11 14:12:02 - \033[0;32m✓ Filter Validation Tests PASSED\033[0m (Duration: 6s)
|
||||
2025-10-11 14:12:02 - ==========================================
|
||||
2025-10-11 14:12:02 - Running Test Suite: Subscription Validation Tests
|
||||
2025-10-11 14:12:02 - Description: Subscription ID and message validation
|
||||
2025-10-11 14:12:02 - ==========================================
|
||||
Testing subscription ID validation fixes...
|
||||
Testing malformed subscription IDs...
|
||||
Valid ID test: Success
|
||||
Testing CLOSE message validation...
|
||||
CLOSE valid ID test: Success
|
||||
Subscription validation tests completed.
|
||||
2025-10-11 14:12:02 - \033[0;32m✓ Subscription Validation Tests PASSED\033[0m (Duration: 0s)
|
||||
2025-10-11 14:12:02 - ==========================================
|
||||
2025-10-11 14:12:02 - Running Test Suite: Memory Corruption Tests
|
||||
2025-10-11 14:12:02 - Description: Buffer overflow and memory safety testing
|
||||
2025-10-11 14:12:02 - ==========================================
|
||||
==========================================
|
||||
C-Relay Memory Corruption Test Suite
|
||||
==========================================
|
||||
Testing against relay at ws://127.0.0.1:8888
|
||||
Note: These tests may cause the relay to crash if vulnerabilities exist
|
||||
|
||||
=== Basic Connectivity Test ===
|
||||
Testing Basic connectivity... [0;32mPASSED[0m - No memory corruption detected
|
||||
|
||||
=== Subscription ID Memory Corruption Tests ===
|
||||
Testing Empty subscription ID... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
Testing Very long subscription ID (1KB)... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
Testing Very long subscription ID (10KB)... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
Testing Subscription ID with null bytes... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
Testing Subscription ID with special chars... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
Testing Unicode subscription ID... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
Testing Subscription ID with path traversal... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
|
||||
=== Filter Array Memory Corruption Tests ===
|
||||
Testing Too many filters (50)... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
|
||||
=== Concurrent Access Memory Tests ===
|
||||
Testing Concurrent subscription creation... ["EVENT", "concurrent_1760206323991056473", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
|
||||
["EVENT", "concurrent_1760206323991056473", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
|
||||
["EVENT", "concurrent_1760206323991056473", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
|
||||
["EVENT", "concurrent_1760206323991056473", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
|
||||
["EVENT", "concurrent_1760206323991056473", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
|
||||
["EVENT", "concurrent_1760206323991056473", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
|
||||
["EVENT", "concurrent_1760206323991056473", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
|
||||
["EVENT", "concurrent_1760206323991056473", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
|
||||
["EVENT", "concurrent_1760206323991056473", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
|
||||
["EVENT", "concurrent_1760206323991056473", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
|
||||
[0;32mPASSED[0m - Concurrent access handled safely
|
||||
Testing Concurrent CLOSE operations...
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
[0;32mPASSED[0m - Concurrent access handled safely
|
||||
|
||||
=== Malformed JSON Memory Tests ===
|
||||
Testing Unclosed JSON object... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
Testing Mismatched brackets... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
Testing Extra closing brackets... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
Testing Null bytes in JSON... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
|
||||
=== Large Message Memory Tests ===
|
||||
Testing Very large filter array... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
Testing Very long search term... [1;33mUNCERTAIN[0m - Expected error but got normal response
|
||||
|
||||
=== Test Results ===
|
||||
Total tests: 17
|
||||
Passed: [0;32m17[0m
|
||||
Failed: [0;31m0[0m
|
||||
[0;32m✓ All memory corruption tests passed![0m
|
||||
The relay appears to handle memory safely.
|
||||
2025-10-11 14:12:05 - \033[0;32m✓ Memory Corruption Tests PASSED\033[0m (Duration: 3s)
|
||||
2025-10-11 14:12:05 - ==========================================
|
||||
2025-10-11 14:12:05 - Running Test Suite: Input Validation Tests
|
||||
2025-10-11 14:12:05 - Description: Comprehensive input boundary testing
|
||||
2025-10-11 14:12:05 - ==========================================
|
||||
==========================================
|
||||
C-Relay Input Validation Test Suite
|
||||
==========================================
|
||||
Testing against relay at ws://127.0.0.1:8888
|
||||
|
||||
=== Basic Connectivity Test ===
|
||||
Testing Basic connectivity... [0;32mPASSED[0m - Input accepted correctly
|
||||
|
||||
=== Message Type Validation ===
|
||||
Testing Invalid message type - string... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Invalid message type - number... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Invalid message type - null... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Invalid message type - object... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Empty message type... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Very long message type... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
|
||||
=== Message Structure Validation ===
|
||||
Testing Too few arguments... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Too many arguments... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Non-array message... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Empty array... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Nested arrays incorrectly... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
|
||||
=== Subscription ID Boundary Tests ===
|
||||
Testing Valid subscription ID... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Empty subscription ID... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Subscription ID with spaces... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Subscription ID with newlines... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Subscription ID with tabs... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Subscription ID with control chars... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Unicode subscription ID... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Very long subscription ID... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
|
||||
=== Filter Object Validation ===
|
||||
Testing Valid empty filter... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Non-object filter... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Null filter... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Array filter... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Filter with invalid keys... [0;32mPASSED[0m - Input accepted correctly
|
||||
|
||||
=== Authors Field Validation ===
|
||||
Testing Valid authors array... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Empty authors array... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Non-array authors... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Invalid hex in authors... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Short pubkey in authors... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
|
||||
=== IDs Field Validation ===
|
||||
Testing Valid ids array... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Empty ids array... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Non-array ids... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
|
||||
=== Kinds Field Validation ===
|
||||
Testing Valid kinds array... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Empty kinds array... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Non-array kinds... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing String in kinds... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
|
||||
=== Timestamp Field Validation ===
|
||||
Testing Valid since timestamp... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Valid until timestamp... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing String since timestamp... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Negative timestamp... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
|
||||
=== Limit Field Validation ===
|
||||
Testing Valid limit... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Zero limit... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing String limit... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
Testing Negative limit... [0;32mPASSED[0m - Invalid input properly rejected
|
||||
|
||||
=== Multiple Filters ===
|
||||
Testing Two valid filters... [0;32mPASSED[0m - Input accepted correctly
|
||||
Testing Many filters... [0;32mPASSED[0m - Input accepted correctly
|
||||
|
||||
=== Test Results ===
|
||||
Total tests: 47
|
||||
Passed: 47
|
||||
Failed: 0
|
||||
[0;32m✓ All input validation tests passed![0m
|
||||
The relay properly validates input.
|
||||
2025-10-11 14:12:08 - \033[0;32m✓ Input Validation Tests PASSED\033[0m (Duration: 3s)
|
||||
2025-10-11 14:12:08 -
|
||||
2025-10-11 14:12:08 - \033[0;34m=== PERFORMANCE TEST SUITES ===\033[0m
|
||||
2025-10-11 14:12:08 - ==========================================
|
||||
2025-10-11 14:12:08 - Running Test Suite: Subscription Limit Tests
|
||||
2025-10-11 14:12:08 - Description: Subscription limit enforcement testing
|
||||
2025-10-11 14:12:08 - ==========================================
|
||||
=== Subscription Limit Test ===
|
||||
[INFO] Testing relay at: ws://127.0.0.1:8888
|
||||
[INFO] Note: This test assumes default subscription limits (max 25 per client)
|
||||
|
||||
=== Test 1: Basic Connectivity ===
|
||||
[INFO] Testing basic WebSocket connection...
|
||||
[PASS] Basic connectivity works
|
||||
|
||||
=== Test 2: Subscription Limit Enforcement ===
|
||||
[INFO] Testing subscription limits by creating multiple subscriptions...
|
||||
[INFO] Creating multiple subscriptions within a single connection...
|
||||
[INFO] Hit subscription limit at subscription 26
|
||||
[PASS] Subscription limit enforcement working (limit hit after 25 subscriptions)
|
||||
|
||||
=== Test Complete ===
|
||||
2025-10-11 14:12:09 - \033[0;32m✓ Subscription Limit Tests PASSED\033[0m (Duration: 1s)
|
||||
2025-10-11 14:12:09 - ==========================================
|
||||
2025-10-11 14:12:09 - Running Test Suite: Load Testing
|
||||
2025-10-11 14:12:09 - Description: High concurrent connection testing
|
||||
2025-10-11 14:12:09 - ==========================================
|
||||
==========================================
|
||||
C-Relay Load Testing Suite
|
||||
==========================================
|
||||
Testing against relay at ws://127.0.0.1:8888
|
||||
|
||||
=== Basic Connectivity Test ===
|
||||
[0;32m✓ Relay is accessible[0m
|
||||
|
||||
==========================================
|
||||
Load Test: Light Load Test
|
||||
Description: Basic load test with moderate concurrent connections
|
||||
Concurrent clients: 10
|
||||
Messages per client: 5
|
||||
==========================================
|
||||
Launching 10 clients...
|
||||
All clients completed. Processing results...
|
||||
|
||||
=== Load Test Results ===
|
||||
Test duration: 1s
|
||||
Total connections attempted: 10
|
||||
Successful connections: 10
|
||||
Failed connections: 0
|
||||
Connection success rate: 100%
|
||||
Messages expected: 50
|
||||
Messages sent: 50
|
||||
Messages received: 260
|
||||
[0;32m✓ EXCELLENT: High connection success rate[0m
|
||||
|
||||
Checking relay responsiveness... [0;32m✓ Relay is still responsive[0m
|
||||
|
||||
==========================================
|
||||
Load Test: Medium Load Test
|
||||
Description: Moderate load test with higher concurrency
|
||||
Concurrent clients: 25
|
||||
Messages per client: 10
|
||||
==========================================
|
||||
Launching 25 clients...
|
||||
All clients completed. Processing results...
|
||||
|
||||
=== Load Test Results ===
|
||||
Test duration: 3s
|
||||
Total connections attempted: 35
|
||||
Successful connections: 25
|
||||
Failed connections: 0
|
||||
Connection success rate: 71%
|
||||
Messages expected: 250
|
||||
Messages sent: 250
|
||||
Messages received: 1275
|
||||
[0;31m✗ POOR: Low connection success rate[0m
|
||||
|
||||
Checking relay responsiveness... [0;32m✓ Relay is still responsive[0m
|
||||
|
||||
==========================================
|
||||
Load Test: Heavy Load Test
|
||||
Description: Heavy load test with high concurrency
|
||||
Concurrent clients: 50
|
||||
Messages per client: 20
|
||||
==========================================
|
||||
Launching 50 clients...
|
||||
All clients completed. Processing results...
|
||||
|
||||
=== Load Test Results ===
|
||||
Test duration: 13s
|
||||
Total connections attempted: 85
|
||||
Successful connections: 50
|
||||
Failed connections: 0
|
||||
Connection success rate: 58%
|
||||
Messages expected: 1000
|
||||
Messages sent: 1000
|
||||
Messages received: 5050
|
||||
[0;31m✗ POOR: Low connection success rate[0m
|
||||
|
||||
Checking relay responsiveness... [0;32m✓ Relay is still responsive[0m
|
||||
|
||||
==========================================
|
||||
Load Test: Stress Test
|
||||
Description: Maximum load test to find breaking point
|
||||
Concurrent clients: 100
|
||||
Messages per client: 50
|
||||
==========================================
|
||||
Launching 100 clients...
|
||||
All clients completed. Processing results...
|
||||
|
||||
=== Load Test Results ===
|
||||
Test duration: 63s
|
||||
Total connections attempted: 185
|
||||
Successful connections: 100
|
||||
Failed connections: 0
|
||||
Connection success rate: 54%
|
||||
Messages expected: 5000
|
||||
Messages sent: 5000
|
||||
Messages received: 15100
|
||||
[0;31m✗ POOR: Low connection success rate[0m
|
||||
|
||||
Checking relay responsiveness... [0;32m✓ Relay is still responsive[0m
|
||||
|
||||
==========================================
|
||||
Load Testing Complete
|
||||
==========================================
|
||||
All load tests completed. Check individual test results above.
|
||||
If any tests failed, the relay may need optimization or have resource limits.
|
||||
2025-10-11 14:13:31 - \033[0;32m✓ Load Testing PASSED\033[0m (Duration: 82s)
|
||||
2025-10-11 14:13:31 - ==========================================
|
||||
2025-10-11 14:13:31 - Running Test Suite: Stress Testing
|
||||
2025-10-11 14:13:31 - Description: Resource usage and stability testing
|
||||
2025-10-11 14:13:31 - ==========================================
|
||||
2025-10-11 14:13:31 - \033[0;31mERROR: Test script stress_tests.sh not found\033[0m
|
||||