Compare commits

24 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 079fb1b0f5 | |
| | 17b2aa8111 | |
| | 78d484cfe0 | |
| | 182e12817d | |
| | 9179d57cc9 | |
| | 9cb9b746d8 | |
| | 57a0089664 | |
| | 53f7608872 | |
| | 838ce5b45a | |
| | e878b9557e | |
| | 6638d37d6f | |
| | 4c29e15329 | |
| | 48890a2121 | |
| | e312d7e18c | |
| | 6c38aaebf3 | |
| | 18b0ac44bf | |
| | b6749eff2f | |
| | c73a103280 | |
| | a5d194f730 | |
| | 6320436b88 | |
| | 87325927ed | |
| | 4435cdf5b6 | |
| | b041654611 | |
| | e833dcefd4 | |
.gitmodules (vendored): 6 changes

```diff
@@ -1,3 +1,9 @@
 [submodule "nostr_core_lib"]
 	path = nostr_core_lib
 	url = https://git.laantungir.net/laantungir/nostr_core_lib.git
+[submodule "c_utils_lib"]
+	path = c_utils_lib
+	url = ssh://git@git.laantungir.net:2222/laantungir/c_utils_lib.git
+[submodule "text_graph"]
+	path = text_graph
+	url = ssh://git@git.laantungir.net:2222/laantungir/text_graph.git
```
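The two new submodules mean a fresh checkout needs a recursive clone (or an explicit submodule init) before the build can find `c_utils_lib` and `text_graph`. A minimal sketch; the c-relay clone URL itself does not appear in this diff and is a placeholder, while the update command is the same one the Dockerfile runs:

```bash
# Hypothetical clone URL; only the submodule URLs are shown in the diff above.
git clone --recursive https://git.laantungir.net/laantungir/c_relay.git
cd c_relay

# For an existing checkout, pull the newly added submodules instead.
git submodule update --init --recursive
```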
```diff
@@ -2,6 +2,6 @@
 description: "Brief description of what this command does"
 ---
 
-Run build_and_push.sh, and supply a good git commit message. For example:
+Run increment_and_push.sh, and supply a good git commit message. For example:
 
-./build_and_push.sh "Fixed the bug with nip05 implementation"
+./increment_and_push.sh "Fixed the bug with nip05 implementation"
```
```diff
@@ -121,8 +121,8 @@ fuser -k 8888/tcp
 - Event filtering done at C level, not SQL level for NIP-40 expiration
 
 ### Configuration Override Behavior
-- CLI port override only affects first-time startup
-- After database creation, all config comes from events
+- CLI port override applies during first-time startup and existing relay restarts
+- After database creation, all config comes from events (but CLI overrides can still be applied)
 - Database path cannot be changed after initialization
 
 ## Non-Obvious Pitfalls
```
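To make the revised override behavior concrete, a hedged sketch of restarting an existing relay with a port override. The `fuser` command is taken from the hunk header above; the binary path comes from the Makefile targets in this PR, and `--port` is a hypothetical flag name since the exact CLI syntax is not shown here:

```bash
# Free the port the relay normally listens on (command from the doc above).
fuser -k 8888/tcp

# Relaunch with a port override; "--port" is an assumed flag name.
# Per the updated docs, the override now applies to restarts of an existing
# relay, not only first-time startup; other config still comes from events.
./build/c_relay_x86 --port 8888
```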
Alpine MUSL builder Dockerfile:

```diff
@@ -1,8 +1,13 @@
 # Alpine-based MUSL static binary builder for C-Relay
 # Produces truly portable binaries with zero runtime dependencies
 
+ARG DEBUG_BUILD=false
+
 FROM alpine:3.19 AS builder
 
+# Re-declare build argument in this stage
+ARG DEBUG_BUILD=false
+
 # Install build dependencies
 RUN apk add --no-cache \
     build-base \
@@ -76,6 +81,15 @@ RUN git submodule update --init --recursive
 # Copy nostr_core_lib source files (cached unless nostr_core_lib changes)
 COPY nostr_core_lib /build/nostr_core_lib/
 
+# Copy c_utils_lib source files (cached unless c_utils_lib changes)
+COPY c_utils_lib /build/c_utils_lib/
+
+# Build c_utils_lib with MUSL-compatible flags (cached unless c_utils_lib changes)
+RUN cd c_utils_lib && \
+    sed -i 's/CFLAGS = -Wall -Wextra -std=c99 -O2 -g/CFLAGS = -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0 -Wall -Wextra -std=c99 -O2 -g/' Makefile && \
+    make clean && \
+    make
+
 # Build nostr_core_lib with required NIPs (cached unless nostr_core_lib changes)
 # Disable fortification in build.sh to prevent __*_chk symbol issues
 # NIPs: 001(Basic), 006(Keys), 013(PoW), 017(DMs), 019(Bech32), 044(Encryption), 059(Gift Wrap - required by NIP-17)
@@ -91,20 +105,29 @@ COPY Makefile /build/Makefile
 
 # Build c-relay with full static linking (only rebuilds when src/ changes)
 # Disable fortification to avoid __*_chk symbols that don't exist in MUSL
-RUN gcc -static -O2 -Wall -Wextra -std=c99 \
+# Use conditional compilation flags based on DEBUG_BUILD argument
+RUN if [ "$DEBUG_BUILD" = "true" ]; then \
+        CFLAGS="-g -O0 -DDEBUG"; \
+        STRIP_CMD=""; \
+        echo "Building with DEBUG symbols enabled"; \
+    else \
+        CFLAGS="-O2"; \
+        STRIP_CMD="strip /build/c_relay_static"; \
+        echo "Building optimized production binary"; \
+    fi && \
+    gcc -static $CFLAGS -Wall -Wextra -std=c99 \
     -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0 \
-    -I. -Inostr_core_lib -Inostr_core_lib/nostr_core \
+    -I. -Ic_utils_lib/src -Inostr_core_lib -Inostr_core_lib/nostr_core \
     -Inostr_core_lib/cjson -Inostr_core_lib/nostr_websocket \
-    src/main.c src/config.c src/debug.c src/dm_admin.c src/request_validator.c \
+    src/main.c src/config.c src/dm_admin.c src/request_validator.c \
     src/nip009.c src/nip011.c src/nip013.c src/nip040.c src/nip042.c \
     src/websockets.c src/subscriptions.c src/api.c src/embedded_web_content.c \
     -o /build/c_relay_static \
+    c_utils_lib/libc_utils.a \
     nostr_core_lib/libnostr_core_x64.a \
     -lwebsockets -lssl -lcrypto -lsqlite3 -lsecp256k1 \
-    -lcurl -lz -lpthread -lm -ldl
-
-# Strip binary to reduce size
-RUN strip /build/c_relay_static
+    -lcurl -lz -lpthread -lm -ldl && \
+    eval "$STRIP_CMD"
 
 # Verify it's truly static
 RUN echo "=== Binary Information ===" && \
```
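The new `DEBUG_BUILD` argument is selected at image build time. A minimal sketch of the two modes; the image tags and building from the repository root are assumptions:

```bash
# Production build (default): optimized -O2 build, binary stripped via STRIP_CMD
docker build -t c-relay:latest .

# Debug build: compiles with -g -O0 -DDEBUG and leaves the binary unstripped
docker build --build-arg DEBUG_BUILD=true -t c-relay:debug .
```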
Makefile: 27 changes

```diff
@@ -2,15 +2,16 @@
 
 CC = gcc
 CFLAGS = -Wall -Wextra -std=c99 -g -O2
-INCLUDES = -I. -Inostr_core_lib -Inostr_core_lib/nostr_core -Inostr_core_lib/cjson -Inostr_core_lib/nostr_websocket
-LIBS = -lsqlite3 -lwebsockets -lz -ldl -lpthread -lm -L/usr/local/lib -lsecp256k1 -lssl -lcrypto -L/usr/local/lib -lcurl
+INCLUDES = -I. -Ic_utils_lib/src -Inostr_core_lib -Inostr_core_lib/nostr_core -Inostr_core_lib/cjson -Inostr_core_lib/nostr_websocket
+LIBS = -lsqlite3 -lwebsockets -lz -ldl -lpthread -lm -L/usr/local/lib -lsecp256k1 -lssl -lcrypto -L/usr/local/lib -lcurl -Lc_utils_lib -lc_utils
 
 # Build directory
 BUILD_DIR = build
 
 # Source files
-MAIN_SRC = src/main.c src/config.c src/debug.c src/dm_admin.c src/request_validator.c src/nip009.c src/nip011.c src/nip013.c src/nip040.c src/nip042.c src/websockets.c src/subscriptions.c src/api.c src/embedded_web_content.c
+MAIN_SRC = src/main.c src/config.c src/dm_admin.c src/request_validator.c src/nip009.c src/nip011.c src/nip013.c src/nip040.c src/nip042.c src/websockets.c src/subscriptions.c src/api.c src/embedded_web_content.c
 NOSTR_CORE_LIB = nostr_core_lib/libnostr_core_x64.a
+C_UTILS_LIB = c_utils_lib/libc_utils.a
 
 # Architecture detection
 ARCH = $(shell uname -m)
@@ -38,6 +39,11 @@ $(NOSTR_CORE_LIB):
 	@echo "Building nostr_core_lib with required NIPs (including NIP-44 for encryption)..."
 	cd nostr_core_lib && ./build.sh --nips=1,6,13,17,19,44,59
 
+# Check if c_utils_lib is built
+$(C_UTILS_LIB):
+	@echo "Building c_utils_lib..."
+	cd c_utils_lib && ./build.sh lib
+
 # Update main.h version information (requires main.h to exist)
 src/main.h:
 	@if [ ! -f src/main.h ]; then \
@@ -75,18 +81,18 @@ force-version:
 	@$(MAKE) src/main.h
 
 # Build the relay
-$(TARGET): $(BUILD_DIR) src/main.h src/sql_schema.h $(MAIN_SRC) $(NOSTR_CORE_LIB)
+$(TARGET): $(BUILD_DIR) src/main.h src/sql_schema.h $(MAIN_SRC) $(NOSTR_CORE_LIB) $(C_UTILS_LIB)
 	@echo "Compiling C-Relay for architecture: $(ARCH)"
-	$(CC) $(CFLAGS) $(INCLUDES) $(MAIN_SRC) -o $(TARGET) $(NOSTR_CORE_LIB) $(LIBS)
+	$(CC) $(CFLAGS) $(INCLUDES) $(MAIN_SRC) -o $(TARGET) $(NOSTR_CORE_LIB) $(C_UTILS_LIB) $(LIBS)
 	@echo "Build complete: $(TARGET)"
 
 # Build for specific architectures
-x86: $(BUILD_DIR) src/main.h src/sql_schema.h $(MAIN_SRC) $(NOSTR_CORE_LIB)
+x86: $(BUILD_DIR) src/main.h src/sql_schema.h $(MAIN_SRC) $(NOSTR_CORE_LIB) $(C_UTILS_LIB)
 	@echo "Building C-Relay for x86_64..."
-	$(CC) $(CFLAGS) $(INCLUDES) $(MAIN_SRC) -o $(BUILD_DIR)/c_relay_x86 $(NOSTR_CORE_LIB) $(LIBS)
+	$(CC) $(CFLAGS) $(INCLUDES) $(MAIN_SRC) -o $(BUILD_DIR)/c_relay_x86 $(NOSTR_CORE_LIB) $(C_UTILS_LIB) $(LIBS)
 	@echo "Build complete: $(BUILD_DIR)/c_relay_x86"
 
-arm64: $(BUILD_DIR) src/main.h src/sql_schema.h $(MAIN_SRC) $(NOSTR_CORE_LIB)
+arm64: $(BUILD_DIR) src/main.h src/sql_schema.h $(MAIN_SRC) $(NOSTR_CORE_LIB) $(C_UTILS_LIB)
 	@echo "Cross-compiling C-Relay for ARM64..."
 	@if ! command -v aarch64-linux-gnu-gcc >/dev/null 2>&1; then \
 		echo "ERROR: ARM64 cross-compiler not found."; \
@@ -110,7 +116,7 @@ arm64: $(BUILD_DIR) src/main.h src/sql_schema.h $(MAIN_SRC) $(NOSTR_CORE_LIB)
 	fi
 	@echo "Using aarch64-linux-gnu-gcc with ARM64 libraries..."
 	PKG_CONFIG_PATH=/usr/lib/aarch64-linux-gnu/pkgconfig:/usr/share/pkgconfig \
-	aarch64-linux-gnu-gcc $(CFLAGS) $(INCLUDES) $(MAIN_SRC) -o $(BUILD_DIR)/c_relay_arm64 $(NOSTR_CORE_LIB) \
+	aarch64-linux-gnu-gcc $(CFLAGS) $(INCLUDES) $(MAIN_SRC) -o $(BUILD_DIR)/c_relay_arm64 $(NOSTR_CORE_LIB) $(C_UTILS_LIB) \
 	-L/usr/lib/aarch64-linux-gnu $(LIBS)
 	@echo "Build complete: $(BUILD_DIR)/c_relay_arm64"
 
@@ -161,9 +167,10 @@ clean:
 	rm -rf $(BUILD_DIR)
 	@echo "Clean complete"
 
-# Clean everything including nostr_core_lib
+# Clean everything including nostr_core_lib and c_utils_lib
 clean-all: clean
 	cd nostr_core_lib && make clean 2>/dev/null || true
+	cd c_utils_lib && make clean 2>/dev/null || true
 
 # Install dependencies (Ubuntu/Debian)
 install-deps:
```
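With `$(C_UTILS_LIB)` added as a prerequisite, the native and cross targets now build `c_utils_lib` automatically before linking. A short sketch of the targets touched by this change (target names taken from the Makefile above):

```bash
# Default build: builds nostr_core_lib and c_utils_lib first, then the relay
make

# Architecture-specific builds now also depend on c_utils_lib/libc_utils.a
make x86
make arm64    # requires aarch64-linux-gnu-gcc

# Clean the relay plus both submodule libraries
make clean-all
```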
README.md: 62 changes

````diff
@@ -164,6 +164,8 @@ All commands are sent as NIP-44 encrypted JSON arrays in the event content. The
 | `system_clear_auth` | `["system_command", "clear_all_auth_rules"]` | Clear all auth rules |
 | `system_status` | `["system_command", "system_status"]` | Get system status |
 | `stats_query` | `["stats_query"]` | Get comprehensive database statistics |
+| **Database Queries** |
+| `sql_query` | `["sql_query", "SELECT * FROM events LIMIT 10"]` | Execute read-only SQL query against relay database |
 
 ### Available Configuration Keys
 
@@ -320,8 +322,68 @@ All admin commands return **signed EVENT responses** via WebSocket following sta
     ],
     "sig": "response_event_signature"
 }]
+```
+
+**SQL Query Response:**
+```json
+["EVENT", "temp_sub_id", {
+  "id": "response_event_id",
+  "pubkey": "relay_public_key",
+  "created_at": 1234567890,
+  "kind": 23457,
+  "content": "nip44 encrypted:{\"query_type\": \"sql_query\", \"request_id\": \"request_event_id\", \"timestamp\": 1234567890, \"query\": \"SELECT * FROM events LIMIT 10\", \"execution_time_ms\": 45, \"row_count\": 10, \"columns\": [\"id\", \"pubkey\", \"created_at\", \"kind\", \"content\"], \"rows\": [[\"abc123...\", \"def456...\", 1234567890, 1, \"Hello world\"], ...]}",
+  "tags": [
+    ["p", "admin_public_key"],
+    ["e", "request_event_id"]
+  ],
+  "sig": "response_event_signature"
+}]
+```
+
+### SQL Query Command
+
+The `sql_query` command allows administrators to execute read-only SQL queries against the relay database. This provides powerful analytics and debugging capabilities through the admin API.
+
+**Request/Response Correlation:**
+- Each response includes the request event ID in both the `tags` array (`["e", "request_event_id"]`) and the decrypted content (`"request_id": "request_event_id"`)
+- This allows proper correlation when multiple queries are submitted concurrently
+- Frontend can track pending queries and match responses to requests
+
+**Security Features:**
+- Only SELECT statements allowed (INSERT, UPDATE, DELETE, DROP, etc. are blocked)
+- Query timeout: 5 seconds (configurable)
+- Result row limit: 1000 rows (configurable)
+- All queries logged with execution time
+
+**Available Tables and Views:**
+- `events` - All Nostr events
+- `config` - Configuration parameters
+- `auth_rules` - Authentication rules
+- `subscription_events` - Subscription lifecycle log
+- `event_broadcasts` - Event broadcast log
+- `recent_events` - Last 1000 events (view)
+- `event_stats` - Event statistics by type (view)
+- `subscription_analytics` - Subscription metrics (view)
+- `active_subscriptions_log` - Currently active subscriptions (view)
+- `event_kinds_view` - Event distribution by kind (view)
+- `top_pubkeys_view` - Top 10 pubkeys by event count (view)
+- `time_stats_view` - Time-based statistics (view)
+
+**Example Queries:**
+```sql
+-- Recent events
+SELECT id, pubkey, created_at, kind FROM events ORDER BY created_at DESC LIMIT 20
+
+-- Event distribution by kind
+SELECT * FROM event_kinds_view ORDER BY count DESC
+
+-- Active subscriptions
+SELECT * FROM active_subscriptions_log ORDER BY created_at DESC
+
+-- Database statistics
+SELECT
+    (SELECT COUNT(*) FROM events) as total_events,
+    (SELECT COUNT(*) FROM subscription_events) as total_subscriptions
 ```
 
 
````
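Because responses arrive as ordinary signed events, any WebSocket client can watch for them while a separate admin tool submits the encrypted `sql_query` request. A hedged sketch using `websocat` (an assumption; any NIP-01 client works) against the local port used elsewhere in this PR:

```bash
# Subscribe to kind 23457 admin responses addressed to the admin pubkey.
# The filter is plain NIP-01 syntax; replace <admin_pubkey_hex> with your key.
echo '["REQ","sql-results",{"kinds":[23457],"#p":["<admin_pubkey_hex>"]}]' \
  | websocat ws://localhost:8888

# Each returned event carries ["e","<request_event_id>"] in its tags, so
# concurrent queries can be matched back to their requests before decryption.
```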
api/embedded.html (new file): 58 lines

```diff
@@ -0,0 +1,58 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+    <meta charset="UTF-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <title>Embedded NOSTR_LOGIN_LITE</title>
+    <style>
+        body {
+            font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
+            margin: 0;
+            padding: 40px;
+            background: white;
+            display: flex;
+            justify-content: center;
+            align-items: center;
+            min-height: 100vh;
+        }
+
+        .container {
+            max-width: 400px;
+            width: 100%;
+        }
+
+        #login-container {
+            /* No styling - let embedded modal blend seamlessly */
+        }
+    </style>
+</head>
+<body>
+    <div class="container">
+        <div id="login-container"></div>
+    </div>
+
+    <script src="../lite/nostr.bundle.js"></script>
+    <script src="../lite/nostr-lite.js"></script>
+
+    <script>
+        document.addEventListener('DOMContentLoaded', async () => {
+            await window.NOSTR_LOGIN_LITE.init({
+                theme: 'default',
+                methods: {
+                    extension: true,
+                    local: true,
+                    seedphrase: true,
+                    readonly: true,
+                    connect: true,
+                    remote: true,
+                    otp: true
+                }
+            });
+
+            window.NOSTR_LOGIN_LITE.embed('#login-container', {
+                seamless: true
+            });
+        });
+    </script>
+</body>
+</html>
```
api/index.css: 815 changes

```diff
@@ -6,7 +6,7 @@
     --muted-color: #dddddd;
     --border-color: var(--muted-color);
     --font-family: "Courier New", Courier, monospace;
-    --border-radius: 15px;
+    --border-radius: 5px;
     --border-width: 1px;
 
     /* Floating Tab Variables (8) */
@@ -22,6 +22,23 @@
     --tab-border-opacity-logged-in: 0.1;
 }
 
+/* Dark Mode Overrides */
+body.dark-mode {
+    --primary-color: #ffffff;
+    --secondary-color: #000000;
+    --accent-color: #ff0000;
+    --muted-color: #222222;
+    --border-color: var(--muted-color);
+
+
+    --tab-bg-logged-out: #000000;
+    --tab-color-logged-out: #ffffff;
+    --tab-border-logged-out: #ffffff;
+    --tab-bg-logged-in: #000000;
+    --tab-color-logged-in: #ffffff;
+    --tab-border-logged-in: #00ffff;
+}
+
 * {
     margin: 0;
     padding: 0;
```
@@ -33,11 +50,237 @@ body {
|
|||||||
background-color: var(--secondary-color);
|
background-color: var(--secondary-color);
|
||||||
color: var(--primary-color);
|
color: var(--primary-color);
|
||||||
/* line-height: 1.4; */
|
/* line-height: 1.4; */
|
||||||
padding: 20px;
|
padding: 0;
|
||||||
|
max-width: none;
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Header Styles */
|
||||||
|
.main-header {
|
||||||
|
background-color: var(--secondary-color);
|
||||||
|
|
||||||
|
padding: 15px 20px;
|
||||||
|
z-index: 100;
|
||||||
max-width: 1200px;
|
max-width: 1200px;
|
||||||
margin: 0 auto;
|
margin: 0 auto;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.header-content {
|
||||||
|
display: flex;
|
||||||
|
justify-content: space-between;
|
||||||
|
align-items: center;
|
||||||
|
position: relative;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-title {
|
||||||
|
margin: 0;
|
||||||
|
font-size: 24px;
|
||||||
|
font-weight: normal;
|
||||||
|
color: var(--primary-color);
|
||||||
|
border: none;
|
||||||
|
padding: 0;
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
.relay-info {
|
||||||
|
text-align: center;
|
||||||
|
flex: 1;
|
||||||
|
max-width: 150px;
|
||||||
|
margin: 0 auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.relay-name {
|
||||||
|
font-size: 14px;
|
||||||
|
font-weight: bold;
|
||||||
|
color: var(--primary-color);
|
||||||
|
margin-bottom: 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.relay-pubkey-container {
|
||||||
|
border: 1px solid transparent;
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
padding: 4px;
|
||||||
|
margin-top: 4px;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: border-color 0.2s ease;
|
||||||
|
background-color: var(--secondary-color);
|
||||||
|
display: inline-block;
|
||||||
|
width: fit-content;
|
||||||
|
}
|
||||||
|
|
||||||
|
.relay-pubkey-container:hover {
|
||||||
|
border-color: var(--border-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.relay-pubkey-container.copied {
|
||||||
|
border-color: var(--accent-color);
|
||||||
|
animation: flash-accent 0.5s ease-in-out;
|
||||||
|
}
|
||||||
|
|
||||||
|
.relay-pubkey {
|
||||||
|
font-size: 8px;
|
||||||
|
color: var(--primary-color);
|
||||||
|
font-family: "Courier New", Courier, monospace;
|
||||||
|
line-height: 1.2;
|
||||||
|
white-space: pre-line;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes flash-accent {
|
||||||
|
0% { border-color: var(--accent-color); }
|
||||||
|
50% { border-color: var(--accent-color); }
|
||||||
|
100% { border-color: transparent; }
|
||||||
|
}
|
||||||
|
|
||||||
|
.relay-description {
|
||||||
|
font-size: 10px;
|
||||||
|
color: var(--primary-color);
|
||||||
|
margin-bottom: 0;
|
||||||
|
display: inline-block;
|
||||||
|
width: fit-content;
|
||||||
|
word-wrap: break-word;
|
||||||
|
overflow-wrap: break-word;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-title {
|
||||||
|
margin: 0;
|
||||||
|
font-size: 24px;
|
||||||
|
font-weight: bolder;
|
||||||
|
color: var(--primary-color);
|
||||||
|
border: none;
|
||||||
|
padding: 0;
|
||||||
|
text-align: left;
|
||||||
|
display: flex;
|
||||||
|
gap: 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.relay-letter {
|
||||||
|
position: relative;
|
||||||
|
display: inline-block;
|
||||||
|
transition: all 0.05s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.relay-letter.underlined::after {
|
||||||
|
content: '';
|
||||||
|
position: absolute;
|
||||||
|
bottom: -2px;
|
||||||
|
left: 0;
|
||||||
|
right: 0;
|
||||||
|
height: 2px;
|
||||||
|
background-color: var(--accent-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-user-name {
|
||||||
|
display: block;
|
||||||
|
font-weight: 500;
|
||||||
|
color: var(--primary-color);
|
||||||
|
font-size: 10px;
|
||||||
|
text-align: center;
|
||||||
|
margin-top: 4px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.profile-area {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
position: relative;
|
||||||
|
cursor: pointer;
|
||||||
|
padding: 8px 12px;
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
transition: background-color 0.2s ease;
|
||||||
|
/* margin-left: auto; */
|
||||||
|
}
|
||||||
|
|
||||||
|
.admin-label {
|
||||||
|
font-size: 10px;
|
||||||
|
color: var(--primary-color);
|
||||||
|
font-weight: normal;
|
||||||
|
margin-bottom: 4px;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.profile-container {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
gap: 4px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.profile-area:hover {
|
||||||
|
background-color: rgba(0, 0, 0, 0.05);
|
||||||
|
}
|
||||||
|
|
||||||
|
.profile-info {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-user-image {
|
||||||
|
width: 48px; /* 50% larger than 32px */
|
||||||
|
height: 48px; /* 50% larger than 32px */
|
||||||
|
border-radius: var(--border-radius); /* Curved corners like other elements */
|
||||||
|
object-fit: cover;
|
||||||
|
border: 2px solid transparent; /* Invisible border */
|
||||||
|
background-color: var(--secondary-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
.logout-dropdown {
|
||||||
|
position: absolute;
|
||||||
|
top: 100%;
|
||||||
|
right: 0;
|
||||||
|
background-color: var(--secondary-color);
|
||||||
|
border: var(--border-width) solid var(--border-color);
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15);
|
||||||
|
min-width: 120px;
|
||||||
|
z-index: 200;
|
||||||
|
margin-top: 4px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.logout-btn {
|
||||||
|
width: 100%;
|
||||||
|
padding: 5px 10px;
|
||||||
|
background: none;
|
||||||
|
border: none;
|
||||||
|
color: var(--primary-color);
|
||||||
|
text-align: left;
|
||||||
|
cursor: pointer;
|
||||||
|
font-size: 10px;
|
||||||
|
font-family: var(--font-family);
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
transition: background-color 0.2s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.logout-btn:hover {
|
||||||
|
background-color: rgba(0, 0, 0, 0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Login Modal Styles */
|
||||||
|
.login-modal-overlay {
|
||||||
|
position: fixed;
|
||||||
|
top: 0;
|
||||||
|
left: 0;
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
background-color: rgba(0, 0, 0, 0.8);
|
||||||
|
display: flex;
|
||||||
|
justify-content: center;
|
||||||
|
align-items: center;
|
||||||
|
z-index: 1000;
|
||||||
|
}
|
||||||
|
|
||||||
|
.login-modal-content {
|
||||||
|
background-color: var(--secondary-color);
|
||||||
|
border: var(--border-width) solid var(--border-color);
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
padding: 30px;
|
||||||
|
max-width: 400px;
|
||||||
|
width: 90%;
|
||||||
|
box-shadow: 0 10px 30px rgba(0, 0, 0, 0.3);
|
||||||
|
}
|
||||||
|
|
||||||
h1 {
|
h1 {
|
||||||
border-bottom: var(--border-width) solid var(--border-color);
|
border-bottom: var(--border-width) solid var(--border-color);
|
||||||
padding-bottom: 10px;
|
padding-bottom: 10px;
|
||||||
```diff
@@ -62,6 +305,8 @@ h2 {
     border-radius: var(--border-radius);
     padding: 20px;
     margin-bottom: 20px;
+    margin-left: 5px;
+    margin-right: 5px;
 }
 
 .input-group {
@@ -124,10 +369,44 @@ button:active {
 }
 
 button:disabled {
-    background-color: #ccc;
-    color: var(--muted-color);
+    background-color: var(--muted-color);
+    color: var(--primary-color);
     cursor: not-allowed;
-    border-color: #ccc;
+    border-color: var(--muted-color);
+}
+
+/* Flash animation for refresh button */
+@keyframes flash-red {
+    0% { border-color: var(--border-color); }
+    50% { border-color: var(--accent-color); }
+    100% { border-color: var(--border-color); }
+}
+
+.flash-red {
+    animation: flash-red 1s ease-in-out;
+}
+
+/* Flash animation for updated statistics values */
+@keyframes flash-value {
+    0% { color: var(--primary-color); }
+    50% { color: var(--accent-color); }
+    100% { color: var(--primary-color); }
+}
+
+.flash-value {
+    animation: flash-value 1s ease-in-out;
+}
+
+/* Npub links styling */
+.npub-link {
+    color: var(--primary-color);
+    text-decoration: none;
+    font-weight: normal;
+    transition: color 0.2s ease;
+}
+
+.npub-link:hover {
+    color: var(--accent-color);
 }
 
 .status {
@@ -161,6 +440,7 @@ button:disabled {
     border-color: var(--accent-color);
 }
 
+
 .config-table {
     border: 1px solid var(--border-color);
     border-radius: var(--border-radius);
@@ -180,6 +460,10 @@ button:disabled {
     font-size: 10px;
 }
 
+.config-table tbody tr:hover {
+    background-color: rgba(0, 0, 0, 0.05);
+}
+
 .config-table-container {
     overflow-x: auto;
     max-width: 100%;
@@ -187,12 +471,13 @@ button:disabled {
 
 .config-table th {
     font-weight: bold;
-    height: 40px; /* Double the default height */
-    line-height: 40px; /* Center text vertically */
+    height: 24px; /* Base height for tbody rows */
+    line-height: 24px; /* Center text vertically */
 }
 
-.config-table tr:hover {
-    background-color: var(--muted-color);
+.config-table td {
+    height: 16px; /* 50% taller than tbody rows would be */
+    line-height: 16px; /* Center text vertically */
 }
 
 /* Inline config value inputs - remove borders and padding to fit seamlessly in table cells */
@@ -218,9 +503,13 @@ button:disabled {
 .config-actions-cell {
     cursor: pointer;
     transition: all 0.2s ease;
-    text-align: center;
+    text-align: center !important;
     font-weight: bold;
     vertical-align: middle;
+    width: 60px;
+    min-width: 60px;
+    max-width: 60px;
+    padding: 8px 4px;
 }
 
 .config-actions-cell:hover {
@@ -266,6 +555,7 @@ button:disabled {
 .inline-buttons {
     display: flex;
     gap: 10px;
+    flex-wrap: nowrap;
 }
 
 .inline-buttons button {
@@ -282,12 +572,21 @@ button:disabled {
 
 .user-info-container {
     display: flex;
-    align-items: flex-start;
-    gap: 20px;
+    flex-direction: column;
+    gap: 15px;
 }
 
 .user-details {
-    flex: 1;
+    order: -1; /* Show user details first when logged in */
+}
+
+.login-section {
+    text-align: center;
+}
+
+.logout-section {
+    display: flex;
+    justify-content: flex-end;
 }
 
 .login-logout-btn {
@@ -334,6 +633,31 @@ button:disabled {
     margin: 5px 0;
 }
 
+/* User profile header with image */
+.user-profile-header {
+    display: flex;
+    align-items: flex-start;
+    gap: 15px;
+}
+
+.user-image-container {
+    flex-shrink: 0;
+}
+
+.user-profile-image {
+    width: 60px;
+    height: 60px;
+    border-radius: var(--border-radius);
+    object-fit: cover;
+    border: 2px solid var(--border-color);
+    background-color: var(--bg-color);
+}
+
+.user-text-info {
+    flex: 1;
+    min-width: 0; /* Allow text to wrap */
+}
+
 .hidden {
     display: none;
 }
@@ -342,9 +666,43 @@ button:disabled {
     display: flex;
     justify-content: space-between;
     align-items: center;
-    margin-bottom: 15px;
-    border-bottom: var(--border-width) solid var(--border-color);
-    padding-bottom: 10px;
+    /* margin-bottom: 15px; */
+    /* border-bottom: var(--border-width) solid var(--border-color); */
+    /* padding-bottom: 10px; */
+}
+
+.countdown-btn {
+    width: auto;
+    min-width: 40px;
+    padding: 8px 12px;
+    background: var(--secondary-color);
+    color: var(--primary-color);
+    border: var(--border-width) solid var(--border-color);
+    border-radius: var(--border-radius);
+    font-family: var(--font-family);
+    font-size: 10px;
+    /* font-weight: bold; */
+    cursor: pointer;
+    transition: all 0.2s ease;
+    margin-left: auto;
+    position: relative;
+}
+
+.countdown-btn:hover::after {
+    content: "countdown";
+    position: absolute;
+    top: -30px;
+    left: 50%;
+    transform: translateX(-50%);
+    background: var(--primary-color);
+    color: var(--secondary-color);
+    padding: 4px 8px;
+    border-radius: 4px;
+    font-size: 12px;
+    font-weight: normal;
+    white-space: nowrap;
+    z-index: 1000;
+    border: 1px solid var(--border-color);
 }
 
 .auth-rules-controls {
```
@@ -458,33 +816,416 @@ button:disabled {
|
|||||||
transition: all 0.2s ease;
|
transition: all 0.2s ease;
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Main Sections Wrapper */
|
/* SQL Query Interface Styles */
|
||||||
.main-sections-wrapper {
|
.query-selector {
|
||||||
|
margin-bottom: 15px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.query-selector select {
|
||||||
|
width: 100%;
|
||||||
|
padding: 8px;
|
||||||
|
background: var(--secondary-color);
|
||||||
|
color: var(--primary-color);
|
||||||
|
border: var(--border-width) solid var(--border-color);
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
font-family: var(--font-family);
|
||||||
|
font-size: 14px;
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
|
||||||
|
.query-selector select:focus {
|
||||||
|
border-color: var(--accent-color);
|
||||||
|
outline: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.query-selector optgroup {
|
||||||
|
font-weight: bold;
|
||||||
|
color: var(--primary-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.query-selector option {
|
||||||
|
padding: 4px;
|
||||||
|
background: var(--secondary-color);
|
||||||
|
color: var(--primary-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.query-editor textarea {
|
||||||
|
width: 100%;
|
||||||
|
min-height: 120px;
|
||||||
|
resize: vertical;
|
||||||
|
font-family: "Courier New", Courier, monospace;
|
||||||
|
font-size: 12px;
|
||||||
|
line-height: 1.4;
|
||||||
|
tab-size: 4;
|
||||||
|
white-space: pre;
|
||||||
|
}
|
||||||
|
|
||||||
|
.query-actions {
|
||||||
display: flex;
|
display: flex;
|
||||||
flex-wrap: wrap;
|
gap: 10px;
|
||||||
gap: var(--border-width);
|
margin-top: 10px;
|
||||||
margin-bottom: 20px;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
.flex-section {
|
.query-actions button {
|
||||||
flex: 1;
|
flex: 1;
|
||||||
min-width: 300px;
|
min-width: 120px;
|
||||||
}
|
}
|
||||||
|
|
||||||
@media (max-width: 700px) {
|
.primary-button {
|
||||||
body {
|
background: var(--primary-color);
|
||||||
padding: 10px;
|
color: var(--secondary-color);
|
||||||
}
|
border-color: var(--primary-color);
|
||||||
|
}
|
||||||
|
|
||||||
.inline-buttons {
|
.primary-button:hover {
|
||||||
flex-direction: column;
|
background: var(--secondary-color);
|
||||||
}
|
color: var(--primary-color);
|
||||||
|
border-color: var(--accent-color);
|
||||||
|
}
|
||||||
|
|
||||||
h1 {
|
.danger-button {
|
||||||
font-size: 20px;
|
background: var(--accent-color);
|
||||||
}
|
color: var(--secondary-color);
|
||||||
|
border-color: var(--accent-color);
|
||||||
|
}
|
||||||
|
|
||||||
h2 {
|
.danger-button:hover {
|
||||||
font-size: 14px;
|
background: var(--secondary-color);
|
||||||
}
|
color: var(--primary-color);
|
||||||
}
|
border-color: var(--accent-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.query-info {
|
||||||
|
padding: 10px;
|
||||||
|
border: var(--border-width) solid var(--border-color);
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
margin: 10px 0;
|
||||||
|
font-family: var(--font-family);
|
||||||
|
font-size: 12px;
|
||||||
|
background-color: var(--secondary-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.query-info-success {
|
||||||
|
border-color: #4CAF50;
|
||||||
|
background-color: #E8F5E8;
|
||||||
|
color: #2E7D32;
|
||||||
|
}
|
||||||
|
|
||||||
|
.query-info-success span {
|
||||||
|
display: inline-block;
|
||||||
|
margin-right: 15px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.request-id {
|
||||||
|
font-family: "Courier New", Courier, monospace;
|
||||||
|
font-size: 10px;
|
||||||
|
opacity: 0.7;
|
||||||
|
}
|
||||||
|
|
||||||
|
.error-message {
|
||||||
|
border-color: var(--accent-color);
|
||||||
|
background-color: #FFEBEE;
|
||||||
|
color: #C62828;
|
||||||
|
padding: 10px;
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
margin: 10px 0;
|
||||||
|
font-family: var(--font-family);
|
||||||
|
font-size: 12px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sql-results-table {
|
||||||
|
border: 1px solid var(--border-color);
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
width: 100%;
|
||||||
|
border-collapse: separate;
|
||||||
|
border-spacing: 0;
|
||||||
|
margin: 10px 0;
|
||||||
|
overflow: hidden;
|
||||||
|
font-size: 11px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sql-results-table th,
|
||||||
|
.sql-results-table td {
|
||||||
|
border: 0.1px solid var(--muted-color);
|
||||||
|
padding: 6px 8px;
|
||||||
|
text-align: left;
|
||||||
|
font-family: var(--font-family);
|
||||||
|
max-width: 200px;
|
||||||
|
overflow: hidden;
|
||||||
|
text-overflow: ellipsis;
|
||||||
|
white-space: nowrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sql-results-table th {
|
||||||
|
font-weight: bold;
|
||||||
|
background-color: rgba(0, 0, 0, 0.05);
|
||||||
|
position: sticky;
|
||||||
|
top: 0;
|
||||||
|
z-index: 10;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sql-results-table tbody tr:hover {
|
||||||
|
background-color: rgba(0, 0, 0, 0.05);
|
||||||
|
}
|
||||||
|
|
||||||
|
.sql-results-table tbody tr:nth-child(even) {
|
||||||
|
background-color: rgba(0, 0, 0, 0.02);
|
||||||
|
}
|
||||||
|
|
||||||
|
.no-results {
|
||||||
|
text-align: center;
|
||||||
|
font-style: italic;
|
||||||
|
color: var(--muted-color);
|
||||||
|
padding: 20px;
|
||||||
|
font-family: var(--font-family);
|
||||||
|
}
|
||||||
|
|
||||||
|
.loading {
|
||||||
|
text-align: center;
|
||||||
|
font-style: italic;
|
||||||
|
color: var(--muted-color);
|
||||||
|
padding: 20px;
|
||||||
|
font-family: var(--font-family);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Dark mode adjustments for SQL interface */
|
||||||
|
body.dark-mode .query-info-success {
|
||||||
|
border-color: #4CAF50;
|
||||||
|
background-color: rgba(76, 175, 80, 0.1);
|
||||||
|
color: #81C784;
|
||||||
|
}
|
||||||
|
|
||||||
|
body.dark-mode .error-message {
|
||||||
|
border-color: var(--accent-color);
|
||||||
|
background-color: rgba(244, 67, 54, 0.1);
|
||||||
|
color: #EF5350;
|
||||||
|
}
|
||||||
|
|
||||||
|
body.dark-mode .sql-results-table th {
|
||||||
|
background-color: rgba(255, 255, 255, 0.05);
|
||||||
|
}
|
||||||
|
|
||||||
|
body.dark-mode .sql-results-table tbody tr:hover {
|
||||||
|
background-color: rgba(255, 255, 255, 0.05);
|
||||||
|
}
|
||||||
|
|
||||||
|
body.dark-mode .sql-results-table tbody tr:nth-child(even) {
|
||||||
|
background-color: rgba(255, 255, 255, 0.02);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/* Config Toggle Button Styles */
|
||||||
|
.config-toggle-btn {
|
||||||
|
width: 24px;
|
||||||
|
height: 24px;
|
||||||
|
padding: 0;
|
||||||
|
background: var(--secondary-color);
|
||||||
|
border: var(--border-width) solid var(--border-color);
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
font-family: var(--font-family);
|
||||||
|
font-size: 14px;
|
||||||
|
cursor: pointer;
|
||||||
|
margin-left: 10px;
|
||||||
|
font-weight: bold;
|
||||||
|
transition: all 0.2s ease;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Toggle Button Styles */
|
||||||
|
.toggle-btn {
|
||||||
|
width: auto;
|
||||||
|
min-width: 120px;
|
||||||
|
padding: 8px 12px;
|
||||||
|
background: var(--secondary-color);
|
||||||
|
color: var(--primary-color);
|
||||||
|
border: var(--border-width) solid var(--border-color);
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
font-family: var(--font-family);
|
||||||
|
font-size: 12px;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all 0.2s ease;
|
||||||
|
margin-left: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.toggle-btn:hover {
|
||||||
|
border-color: var(--accent-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.toggle-btn:active {
|
||||||
|
background: var(--accent-color);
|
||||||
|
color: var(--secondary-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.config-toggle-btn:hover {
|
||||||
|
border-color: var(--accent-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.config-toggle-btn:active {
|
||||||
|
background: var(--accent-color);
|
||||||
|
color: var(--secondary-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.config-toggle-btn[data-state="true"] {
|
||||||
|
color: var(--accent-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.config-toggle-btn[data-state="false"] {
|
||||||
|
color: var(--primary-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.config-toggle-btn[data-state="indeterminate"] {
|
||||||
|
background-color: var(--muted-color);
|
||||||
|
color: var(--primary-color);
|
||||||
|
cursor: not-allowed;
|
||||||
|
border-color: var(--muted-color);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/* ================================
|
||||||
|
REAL-TIME EVENT RATE CHART
|
||||||
|
================================ */
|
||||||
|
|
||||||
|
.chart-container {
|
||||||
|
margin: 20px 0;
|
||||||
|
padding: 15px;
|
||||||
|
background: var(--secondary-color);
|
||||||
|
border: var(--border-width) solid var(--border-color);
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
}
|
||||||
|
|
||||||
|
#event-rate-chart {
|
||||||
|
font-family: var(--font-family);
|
||||||
|
font-size: 12px;
|
||||||
|
line-height: 1.2;
|
||||||
|
color: var(--primary-color);
|
||||||
|
background: var(--secondary-color);
|
||||||
|
padding: 20px;
|
||||||
|
overflow: hidden;
|
||||||
|
white-space: pre;
|
||||||
|
border: var(--border-width) solid var(--border-color);
|
||||||
|
border-radius: var(--border-radius);
|
||||||
|
box-sizing: border-box;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ================================
|
||||||
|
SIDE NAVIGATION MENU
|
||||||
|
================================ */
|
||||||
|
|
||||||
|
.side-nav {
|
||||||
|
position: fixed;
|
||||||
|
top: 0;
|
||||||
|
left: -300px;
|
||||||
|
width: 280px;
|
||||||
|
height: 100vh;
|
||||||
|
background: var(--secondary-color);
|
||||||
|
border-right: var(--border-width) solid var(--border-color);
|
||||||
|
z-index: 1000;
|
||||||
|
transition: left 0.3s ease;
|
||||||
|
overflow-y: auto;
|
||||||
|
padding-top: 80px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.side-nav.open {
|
||||||
|
left: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.side-nav-overlay {
|
||||||
|
position: fixed;
|
||||||
|
top: 0;
|
||||||
|
left: 0;
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
background: rgba(0, 0, 0, 0.5);
|
||||||
|
z-index: 999;
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.side-nav-overlay.show {
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-menu {
|
||||||
|
list-style: none;
|
||||||
|
padding: 0;
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-menu li {
|
||||||
|
border-bottom: var(--border-width) solid var(--muted-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-menu li:last-child {
|
||||||
|
border-bottom: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-item {
|
||||||
|
display: block;
|
||||||
|
padding: 15px 20px;
|
||||||
|
color: var(--primary-color);
|
||||||
|
text-decoration: none;
|
||||||
|
font-family: var(--font-family);
|
||||||
|
font-size: 16px;
|
||||||
|
font-weight: bold;
|
||||||
|
transition: all 0.2s ease;
|
||||||
|
cursor: pointer;
|
||||||
|
border: 2px solid var(--secondary-color);
|
||||||
|
background: none;
|
||||||
|
width: 100%;
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-item:hover {
|
||||||
|
border: 2px solid var(--secondary-color);
|
||||||
|
background:var(--muted-color);
|
||||||
|
color: var(--accent-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-item.active {
|
||||||
|
text-decoration: underline;
|
||||||
|
padding-left: 16px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-footer {
|
||||||
|
position: absolute;
|
||||||
|
bottom: 20px;
|
||||||
|
left: 0;
|
||||||
|
right: 0;
|
||||||
|
padding: 0 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-footer-btn {
|
||||||
|
display: block;
|
||||||
|
width: 100%;
|
||||||
|
padding: 12px 20px;
|
||||||
|
margin-bottom: 8px;
|
||||||
|
color: var(--primary-color);
|
||||||
|
|
||||||
|
border: 1px solid var(--border-color);
|
||||||
|
border-radius: 4px;
|
||||||
|
font-family: var(--font-family);
|
||||||
|
font-size: 14px;
|
||||||
|
font-weight: bold;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all 0.2s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-footer-btn:hover {
|
||||||
|
background:var(--muted-color);
|
||||||
|
border-color: var(--accent-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-footer-btn:last-child {
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-title.clickable {
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all 0.2s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-title.clickable:hover {
|
||||||
|
opacity: 0.8;
|
||||||
|
}
|
||||||
|
|||||||
api/index.html: 447 changes
@@ -4,90 +4,232 @@
|
|||||||
<head>
|
<head>
|
||||||
<meta charset="UTF-8">
|
<meta charset="UTF-8">
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
<title>C-Relay Admin API</title>
|
<title>C-Relay Admin</title>
|
||||||
<link rel="stylesheet" href="/api/index.css">
|
<link rel="stylesheet" href="/api/index.css">
|
||||||
</head>
|
</head>
|
||||||
|
|
||||||
<body>
|
<body>
|
||||||
<h1>C-RELAY ADMIN API</h1>
|
<!-- Side Navigation Menu -->
|
||||||
|
<nav class="side-nav" id="side-nav">
|
||||||
|
<ul class="nav-menu">
|
||||||
|
<li><button class="nav-item" data-page="statistics">Statistics</button></li>
|
||||||
|
<li><button class="nav-item" data-page="subscriptions">Subscriptions</button></li>
|
||||||
|
<li><button class="nav-item" data-page="configuration">Configuration</button></li>
|
||||||
|
<li><button class="nav-item" data-page="authorization">Authorization</button></li>
|
||||||
|
<li><button class="nav-item" data-page="dm">DM</button></li>
|
||||||
|
<li><button class="nav-item" data-page="database">Database Query</button></li>
|
||||||
|
</ul>
|
||||||
|
<div class="nav-footer">
|
||||||
|
<button class="nav-footer-btn" id="nav-dark-mode-btn">DARK MODE</button>
|
||||||
|
<button class="nav-footer-btn" id="nav-logout-btn">LOGOUT</button>
|
||||||
|
</div>
|
||||||
|
</nav>
|
||||||
|
|
||||||
<!-- Main Sections Wrapper -->
|
<!-- Side Navigation Overlay -->
|
||||||
<div class="main-sections-wrapper">
|
<div class="side-nav-overlay" id="side-nav-overlay"></div>
|
||||||
|
|
||||||
<!-- Persistent Authentication Header - Always Visible -->
|
<!-- Header with title and profile display -->
|
||||||
<div id="persistent-auth-container" class="section flex-section">
|
<div class="section">
|
||||||
<div class="user-info-container">
|
|
||||||
<button type="button" id="login-logout-btn" class="login-logout-btn">LOGIN</button>
|
<div class="header-content">
|
||||||
<div class="user-details" id="persistent-user-details" style="display: none;">
|
<div class="header-title clickable" id="header-title">
|
||||||
<div><strong>Name:</strong> <span id="persistent-user-name">Loading...</span></div>
|
<span class="relay-letter" data-letter="R">R</span>
|
||||||
<div><strong>Public Key:</strong>
|
<span class="relay-letter" data-letter="E">E</span>
|
||||||
<div class="user-pubkey" id="persistent-user-pubkey">Loading...</div>
|
<span class="relay-letter" data-letter="L">L</span>
|
||||||
|
<span class="relay-letter" data-letter="A">A</span>
|
||||||
|
<span class="relay-letter" data-letter="Y">Y</span>
|
||||||
|
</div>
|
||||||
|
<div class="relay-info">
|
||||||
|
<div id="relay-name" class="relay-name">C-Relay</div>
|
||||||
|
<div id="relay-description" class="relay-description">Loading...</div>
|
||||||
|
<div id="relay-pubkey-container" class="relay-pubkey-container">
|
||||||
|
<div id="relay-pubkey" class="relay-pubkey">Loading...</div>
|
||||||
</div>
|
</div>
|
||||||
<div><strong>About:</strong> <span id="persistent-user-about">Loading...</span></div>
|
|
||||||
</div>
|
</div>
|
||||||
|
<div class="profile-area" id="profile-area" style="display: none;">
|
||||||
|
<div class="admin-label">admin</div>
|
||||||
|
<div class="profile-container">
|
||||||
|
<img id="header-user-image" class="header-user-image" alt="Profile" style="display: none;">
|
||||||
|
<span id="header-user-name" class="header-user-name">Loading...</span>
|
||||||
|
</div>
|
||||||
|
<!-- Logout dropdown -->
|
||||||
|
<!-- Dropdown menu removed - buttons moved to sidebar -->
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Login Modal Overlay -->
|
||||||
|
<div id="login-modal" class="login-modal-overlay" style="display: none;">
|
||||||
|
<div class="login-modal-content">
|
||||||
|
<div id="login-modal-container"></div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- DATABASE STATISTICS Section -->
|
||||||
|
<div class="section flex-section" id="databaseStatisticsSection" style="display: none;">
|
||||||
|
<div class="section-header">
|
||||||
|
<h2>DATABASE STATISTICS</h2>
|
||||||
|
<!-- Monitoring is now subscription-based - no toggle button needed -->
|
||||||
|
<!-- Subscribe to kind 24567 events to receive real-time monitoring data -->
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Event Rate Graph Container -->
|
||||||
|
<div id="event-rate-chart"></div>
|
||||||
|
|
||||||
|
<!-- Database Overview Table -->
|
||||||
|
<div class="input-group">
|
||||||
|
<div class="config-table-container">
|
||||||
|
<table class="config-table" id="stats-overview-table">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th>Metric</th>
|
||||||
|
<th>Value</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody id="stats-overview-table-body">
|
||||||
|
<tr>
|
||||||
|
<td>Database Size</td>
|
||||||
|
<td id="db-size">-</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>Total Events</td>
|
||||||
|
<td id="total-events">-</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>Process ID</td>
|
||||||
|
<td id="process-id">-</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>Active Subscriptions</td>
|
||||||
|
<td id="active-subscriptions">-</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>Memory Usage</td>
|
||||||
|
<td id="memory-usage">-</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>CPU Core</td>
|
||||||
|
<td id="cpu-core">-</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>CPU Usage</td>
|
||||||
|
<td id="cpu-usage">-</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>Oldest Event</td>
|
||||||
|
<td id="oldest-event">-</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>Newest Event</td>
|
||||||
|
<td id="newest-event">-</td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<!-- Login Section -->
|
<!-- Event Kind Distribution Table -->
|
||||||
<div id="login-section" class="flex-section">
|
<div class="input-group">
|
||||||
<div class="section">
|
<label>Event Kind Distribution:</label>
|
||||||
<h2>NOSTR AUTHENTICATION</h2>
|
<div class="config-table-container">
|
||||||
<p id="login-instructions">Please login with your Nostr identity to access the admin interface.</p>
|
<table class="config-table" id="stats-kinds-table">
|
||||||
<!-- nostr-lite login UI will be injected here -->
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th>Event Kind</th>
|
||||||
|
<th>Count</th>
|
||||||
|
<th>Percentage</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody id="stats-kinds-table-body">
|
||||||
|
<tr>
|
||||||
|
<td colspan="3" style="text-align: center; font-style: italic;">No data loaded</td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<!-- Relay Connection Section -->
|
<!-- Time-based Statistics Table -->
|
||||||
<div id="relay-connection-section" class="flex-section">
|
<div class="input-group">
|
||||||
<div class="section">
|
<label>Time-based Statistics:</label>
|
||||||
<h2>RELAY CONNECTION</h2>
|
<div class="config-table-container">
|
||||||
|
<table class="config-table" id="stats-time-table">
|
||||||
<div class="input-group">
|
<thead>
|
||||||
<label for="relay-connection-url">Relay URL:</label>
|
<tr>
|
||||||
<input type="text" id="relay-connection-url" value=""
|
<th>Period</th>
|
||||||
placeholder="ws://localhost:8888 or wss://relay.example.com">
|
<th>Events</th>
|
||||||
</div>
|
</tr>
|
||||||
|
</thead>
|
||||||
<div class="input-group">
|
<tbody id="stats-time-table-body">
|
||||||
<label for="relay-pubkey-manual">Relay Pubkey (if not available via NIP-11):</label>
|
<tr>
|
||||||
<input type="text" id="relay-pubkey-manual" placeholder="64-character hex pubkey"
|
<td>Last 24 Hours</td>
|
||||||
pattern="[0-9a-fA-F]{64}" title="64-character hexadecimal public key">
|
<td id="events-24h">-</td>
|
||||||
|
</tr>
|
||||||
</div>
|
<tr>
|
||||||
|
<td>Last 7 Days</td>
|
||||||
<div class="inline-buttons">
|
<td id="events-7d">-</td>
|
||||||
<button type="button" id="connect-relay-btn">CONNECT TO RELAY</button>
|
</tr>
|
||||||
<button type="button" id="disconnect-relay-btn" disabled>DISCONNECT</button>
|
<tr>
|
||||||
<button type="button" id="restart-relay-btn" disabled>RESTART RELAY</button>
|
<td>Last 30 Days</td>
|
||||||
</div>
|
<td id="events-30d">-</td>
|
||||||
|
</tr>
|
||||||
<div class="status disconnected" id="relay-connection-status">NOT CONNECTED</div>
|
</tbody>
|
||||||
|
</table>
|
||||||
<!-- Relay Information Display -->
|
|
||||||
<div id="relay-info-display" class="hidden">
|
|
||||||
<h3>Relay Information (NIP-11)</h3>
|
|
||||||
<table class="config-table" id="relay-info-table">
|
|
||||||
<thead>
|
|
||||||
<tr>
|
|
||||||
<th>Property</th>
|
|
||||||
<th>Value</th>
|
|
||||||
</tr>
|
|
||||||
</thead>
|
|
||||||
<tbody id="relay-info-table-body">
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<!-- Top Pubkeys Table -->
<div class="input-group">
    <label>Top Pubkeys by Event Count:</label>
    <div class="config-table-container">
        <table class="config-table" id="stats-pubkeys-table">
            <thead>
                <tr>
                    <th>Rank</th>
                    <th>Pubkey</th>
                    <th>Event Count</th>
                    <th>Percentage</th>
                </tr>
            </thead>
            <tbody id="stats-pubkeys-table-body">
                <tr>
                    <td colspan="4" style="text-align: center; font-style: italic;">No data loaded</td>
                </tr>
            </tbody>
        </table>
    </div>
</div>

</div>

<!-- SUBSCRIPTION DETAILS Section (Admin Only) -->
<div class="section flex-section" id="subscriptionDetailsSection" style="display: none;">
    <div class="section-header">
        <h2>ACTIVE SUBSCRIPTION DETAILS</h2>
    </div>

</div> <!-- End Main Sections Wrapper -->
    <div class="input-group">
        <div class="config-table-container">
            <table class="config-table" id="subscription-details-table">
                <thead>
                    <tr>
                        <th>Subscription ID</th>
                        <th>Client IP</th>
                        <th>WSI Pointer</th>
                        <th>Duration</th>
                        <th>Filters</th>
                    </tr>
                </thead>
                <tbody id="subscription-details-table-body">
                    <tr>
                        <td colspan="5" style="text-align: center; font-style: italic;">No subscriptions active</td>
                    </tr>
                </tbody>
            </table>
        </div>
    </div>
</div>

<!-- Testing Section -->
<div id="div_config" class="section flex-section" style="display: none;">
@@ -129,7 +271,6 @@
<th>Rule Type</th>
<th>Pattern Type</th>
<th>Pattern Value</th>
<th>Action</th>
<th>Status</th>
<th>Actions</th>
</tr>
@@ -173,132 +314,6 @@

<!-- DATABASE STATISTICS Section -->
<div class="section" id="databaseStatisticsSection" style="display: none;">
    <div class="section-header">
        <h2>DATABASE STATISTICS</h2>
    </div>

    <!-- Database Overview Table -->
    <div class="input-group">
        <label>Database Overview:</label>
        <div class="config-table-container">
            <table class="config-table" id="stats-overview-table">
                <thead>
                    <tr>
                        <th>Metric</th>
                        <th>Value</th>
                        <th>Description</th>
                    </tr>
                </thead>
                <tbody id="stats-overview-table-body">
                    <tr>
                        <td>Database Size</td>
                        <td id="db-size">-</td>
                        <td>Current database file size</td>
                    </tr>
                    <tr>
                        <td>Total Events</td>
                        <td id="total-events">-</td>
                        <td>Total number of events stored</td>
                    </tr>
                    <tr>
                        <td>Oldest Event</td>
                        <td id="oldest-event">-</td>
                        <td>Timestamp of oldest event</td>
                    </tr>
                    <tr>
                        <td>Newest Event</td>
                        <td id="newest-event">-</td>
                        <td>Timestamp of newest event</td>
                    </tr>
                </tbody>
            </table>
        </div>
    </div>

    <!-- Event Kind Distribution Table -->
    <div class="input-group">
        <label>Event Kind Distribution:</label>
        <div class="config-table-container">
            <table class="config-table" id="stats-kinds-table">
                <thead>
                    <tr>
                        <th>Event Kind</th>
                        <th>Count</th>
                        <th>Percentage</th>
                    </tr>
                </thead>
                <tbody id="stats-kinds-table-body">
                    <tr>
                        <td colspan="3" style="text-align: center; font-style: italic;">No data loaded</td>
                    </tr>
                </tbody>
            </table>
        </div>
    </div>

    <!-- Time-based Statistics Table -->
    <div class="input-group">
        <label>Time-based Statistics:</label>
        <div class="config-table-container">
            <table class="config-table" id="stats-time-table">
                <thead>
                    <tr>
                        <th>Period</th>
                        <th>Events</th>
                        <th>Description</th>
                    </tr>
                </thead>
                <tbody id="stats-time-table-body">
                    <tr>
                        <td>Last 24 Hours</td>
                        <td id="events-24h">-</td>
                        <td>Events in the last day</td>
                    </tr>
                    <tr>
                        <td>Last 7 Days</td>
                        <td id="events-7d">-</td>
                        <td>Events in the last week</td>
                    </tr>
                    <tr>
                        <td>Last 30 Days</td>
                        <td id="events-30d">-</td>
                        <td>Events in the last month</td>
                    </tr>
                </tbody>
            </table>
        </div>
    </div>

    <!-- Top Pubkeys Table -->
    <div class="input-group">
        <label>Top Pubkeys by Event Count:</label>
        <div class="config-table-container">
            <table class="config-table" id="stats-pubkeys-table">
                <thead>
                    <tr>
                        <th>Rank</th>
                        <th>Pubkey</th>
                        <th>Event Count</th>
                        <th>Percentage</th>
                    </tr>
                </thead>
                <tbody id="stats-pubkeys-table-body">
                    <tr>
                        <td colspan="4" style="text-align: center; font-style: italic;">No data loaded</td>
                    </tr>
                </tbody>
            </table>
        </div>
    </div>

    <!-- Refresh Button -->
    <div class="input-group">
        <button type="button" id="refresh-stats-btn">REFRESH STATISTICS</button>
    </div>
</div>

<!-- NIP-17 DIRECT MESSAGES Section -->
<div class="section" id="nip17DMSection" style="display: none;">
@@ -326,6 +341,54 @@
</div>
</div>

<!-- SQL QUERY Section -->
<div class="section" id="sqlQuerySection" style="display: none;">
    <div class="section-header">
        <h2>SQL QUERY CONSOLE</h2>
    </div>

    <!-- Query Selector -->
    <div class="input-group">
        <label for="query-dropdown">Quick Queries & History:</label>
        <select id="query-dropdown" onchange="loadSelectedQuery()">
            <option value="">-- Select a query --</option>
            <optgroup label="Common Queries">
                <option value="recent_events">Recent Events</option>
                <option value="event_stats">Event Statistics</option>
                <option value="subscriptions">Active Subscriptions</option>
                <option value="top_pubkeys">Top Pubkeys</option>
                <option value="event_kinds">Event Kinds Distribution</option>
                <option value="time_stats">Time-based Statistics</option>
            </optgroup>
            <optgroup label="Query History" id="history-group">
                <!-- Dynamically populated from localStorage -->
            </optgroup>
        </select>
    </div>

    <!-- Query Editor -->
    <div class="input-group">
        <label for="sql-input">SQL Query:</label>
        <textarea id="sql-input" rows="5" placeholder="SELECT * FROM events LIMIT 10"></textarea>
    </div>

    <!-- Query Actions -->
    <div class="input-group">
        <div class="inline-buttons">
            <button type="button" id="execute-sql-btn">EXECUTE QUERY</button>
            <button type="button" id="clear-sql-btn">CLEAR</button>
            <button type="button" id="clear-history-btn">CLEAR HISTORY</button>
        </div>
    </div>

    <!-- Query Results -->
    <div class="input-group">
        <label>Query Results:</label>
        <div id="query-info" class="info-box"></div>
        <div id="query-table" class="config-table-container"></div>
    </div>
</div>
|
<!-- Load the official nostr-tools bundle first -->
|
||||||
<!-- <script src="https://laantungir.net/nostr-login-lite/nostr.bundle.js"></script> -->
|
<!-- <script src="https://laantungir.net/nostr-login-lite/nostr.bundle.js"></script> -->
|
||||||
<script src="/api/nostr.bundle.js"></script>
|
<script src="/api/nostr.bundle.js"></script>
|
||||||
@@ -333,6 +396,8 @@
|
|||||||
<!-- Load NOSTR_LOGIN_LITE main library -->
|
<!-- Load NOSTR_LOGIN_LITE main library -->
|
||||||
<!-- <script src="https://laantungir.net/nostr-login-lite/nostr-lite.js"></script> -->
|
<!-- <script src="https://laantungir.net/nostr-login-lite/nostr-lite.js"></script> -->
|
||||||
<script src="/api/nostr-lite.js"></script>
|
<script src="/api/nostr-lite.js"></script>
|
||||||
|
<!-- Load text_graph library -->
|
||||||
|
<script src="/api/text_graph.js"></script>
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
8003
api/index.js
8003
api/index.js
File diff suppressed because it is too large
Load Diff
463
api/text_graph.js
Normal file
463
api/text_graph.js
Normal file
@@ -0,0 +1,463 @@
|
|||||||
|
/**
|
||||||
|
* ASCIIBarChart - A dynamic ASCII-based vertical bar chart renderer
|
||||||
|
*
|
||||||
|
* Creates real-time animated bar charts using monospaced characters (X)
|
||||||
|
* with automatic scaling, labels, and responsive font sizing.
|
||||||
|
*/
|
||||||
|
class ASCIIBarChart {
|
||||||
|
/**
|
||||||
|
* Create a new ASCII bar chart
|
||||||
|
* @param {string} containerId - The ID of the HTML element to render the chart in
|
||||||
|
* @param {Object} options - Configuration options
|
||||||
|
* @param {number} [options.maxHeight=20] - Maximum height of the chart in rows
|
||||||
|
* @param {number} [options.maxDataPoints=30] - Maximum number of data columns before scrolling
|
||||||
|
* @param {string} [options.title=''] - Chart title (displayed centered at top)
|
||||||
|
* @param {string} [options.xAxisLabel=''] - X-axis label (displayed centered at bottom)
|
||||||
|
* @param {string} [options.yAxisLabel=''] - Y-axis label (displayed vertically on left)
|
||||||
|
* @param {boolean} [options.autoFitWidth=true] - Automatically adjust font size to fit container width
|
||||||
|
* @param {boolean} [options.useBinMode=false] - Enable time bin mode for data aggregation
|
||||||
|
* @param {number} [options.binDuration=10000] - Duration of each time bin in milliseconds (10 seconds default)
|
||||||
|
* @param {string} [options.xAxisLabelFormat='elapsed'] - X-axis label format: 'elapsed', 'bins', 'timestamps', 'ranges'
|
||||||
|
* @param {boolean} [options.debug=false] - Enable debug logging
|
||||||
|
*/
|
||||||
|
constructor(containerId, options = {}) {
|
||||||
|
this.container = document.getElementById(containerId);
|
||||||
|
this.data = [];
|
||||||
|
this.maxHeight = options.maxHeight || 20;
|
||||||
|
this.maxDataPoints = options.maxDataPoints || 30;
|
||||||
|
this.totalDataPoints = 0; // Track total number of data points added
|
||||||
|
this.title = options.title || '';
|
||||||
|
this.xAxisLabel = options.xAxisLabel || '';
|
||||||
|
this.yAxisLabel = options.yAxisLabel || '';
|
||||||
|
this.autoFitWidth = options.autoFitWidth !== false; // Default to true
|
||||||
|
this.debug = options.debug || false; // Debug logging option
|
||||||
|
|
||||||
|
// Time bin configuration
|
||||||
|
this.useBinMode = options.useBinMode !== false; // Default to true
|
||||||
|
this.binDuration = options.binDuration || 4000; // 4 seconds default
|
||||||
|
this.xAxisLabelFormat = options.xAxisLabelFormat || 'elapsed';
|
||||||
|
|
||||||
|
// Time bin data structures
|
||||||
|
this.bins = [];
|
||||||
|
this.currentBinIndex = -1;
|
||||||
|
this.binStartTime = null;
|
||||||
|
this.binCheckInterval = null;
|
||||||
|
this.chartStartTime = Date.now();
|
||||||
|
|
||||||
|
// Set up resize observer if auto-fit is enabled
|
||||||
|
if (this.autoFitWidth) {
|
||||||
|
this.resizeObserver = new ResizeObserver(() => {
|
||||||
|
this.adjustFontSize();
|
||||||
|
});
|
||||||
|
this.resizeObserver.observe(this.container);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize first bin if bin mode is enabled
|
||||||
|
if (this.useBinMode) {
|
||||||
|
this.initializeBins();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add a new data point to the chart
|
||||||
|
* @param {number} value - The numeric value to add
|
||||||
|
*/
|
||||||
|
addValue(value) {
|
||||||
|
// Time bin mode: add value to current active bin count
|
||||||
|
this.checkBinRotation(); // Ensure we have an active bin
|
||||||
|
this.bins[this.currentBinIndex].count += value; // Changed from ++ to += value
|
||||||
|
this.totalDataPoints++;
|
||||||
|
|
||||||
|
this.render();
|
||||||
|
this.updateInfo();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clear all data from the chart
|
||||||
|
*/
|
||||||
|
clear() {
|
||||||
|
this.data = [];
|
||||||
|
this.totalDataPoints = 0;
|
||||||
|
|
||||||
|
if (this.useBinMode) {
|
||||||
|
this.bins = [];
|
||||||
|
this.currentBinIndex = -1;
|
||||||
|
this.binStartTime = null;
|
||||||
|
this.initializeBins();
|
||||||
|
}
|
||||||
|
|
||||||
|
this.render();
|
||||||
|
this.updateInfo();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate the width of the chart in characters
|
||||||
|
* @returns {number} The chart width in characters
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
getChartWidth() {
|
||||||
|
let dataLength = this.maxDataPoints; // Always use maxDataPoints for consistent width
|
||||||
|
|
||||||
|
if (dataLength === 0) return 50; // Default width for empty chart
|
||||||
|
|
||||||
|
const yAxisPadding = this.yAxisLabel ? 2 : 0;
|
||||||
|
const yAxisNumbers = 3; // Width of Y-axis numbers
|
||||||
|
const separator = 1; // The '|' character
|
||||||
|
// const dataWidth = dataLength * 2; // Each column is 2 characters wide // TEMP: commented for no-space test
|
||||||
|
const dataWidth = dataLength; // Each column is 1 character wide // TEMP: adjusted for no-space columns
|
||||||
|
const padding = 1; // Extra padding
|
||||||
|
|
||||||
|
const totalWidth = yAxisPadding + yAxisNumbers + separator + dataWidth + padding;
|
||||||
|
|
||||||
|
// Only log when width changes
|
||||||
|
if (this.debug && this.lastChartWidth !== totalWidth) {
|
||||||
|
console.log('getChartWidth changed:', { dataLength, totalWidth, previous: this.lastChartWidth });
|
||||||
|
this.lastChartWidth = totalWidth;
|
||||||
|
}
|
||||||
|
|
||||||
|
return totalWidth;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Adjust font size to fit container width
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
adjustFontSize() {
|
||||||
|
if (!this.autoFitWidth) return;
|
||||||
|
|
||||||
|
const containerWidth = this.container.clientWidth;
|
||||||
|
const chartWidth = this.getChartWidth();
|
||||||
|
|
||||||
|
if (chartWidth === 0) return;
|
||||||
|
|
||||||
|
// Calculate optimal font size
|
||||||
|
// For monospace fonts, character width is approximately 0.6 * font size
|
||||||
|
// Use a slightly smaller ratio to fit more content
|
||||||
|
const charWidthRatio = 0.7;
|
||||||
|
const padding = 30; // Reduce padding to fit more content
|
||||||
|
const availableWidth = containerWidth - padding;
|
||||||
|
const optimalFontSize = Math.floor((availableWidth / chartWidth) / charWidthRatio);
|
||||||
|
|
||||||
|
// Set reasonable bounds (min 4px, max 20px)
|
||||||
|
const fontSize = Math.max(4, Math.min(20, optimalFontSize));
|
||||||
|
|
||||||
|
// Only log when font size changes
|
||||||
|
if (this.debug && this.lastFontSize !== fontSize) {
|
||||||
|
console.log('fontSize changed:', { containerWidth, chartWidth, fontSize, previous: this.lastFontSize });
|
||||||
|
this.lastFontSize = fontSize;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.container.style.fontSize = fontSize + 'px';
|
||||||
|
this.container.style.lineHeight = '1.0';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render the chart to the container
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
render() {
|
||||||
|
let dataToRender = [];
|
||||||
|
let maxValue = 0;
|
||||||
|
let minValue = 0;
|
||||||
|
let valueRange = 0;
|
||||||
|
|
||||||
|
if (this.useBinMode) {
|
||||||
|
// Bin mode: render bin counts
|
||||||
|
if (this.bins.length === 0) {
|
||||||
|
this.container.textContent = 'No data yet. Click Start to begin.';
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// Always create a fixed-length array filled with 0s, then overlay actual bin data
|
||||||
|
dataToRender = new Array(this.maxDataPoints).fill(0);
|
||||||
|
|
||||||
|
// Overlay actual bin data (most recent bins, reversed for left-to-right display)
|
||||||
|
const startIndex = Math.max(0, this.bins.length - this.maxDataPoints);
|
||||||
|
const recentBins = this.bins.slice(startIndex);
|
||||||
|
|
||||||
|
// Reverse the bins so most recent is on the left, and overlay onto the fixed array
|
||||||
|
recentBins.reverse().forEach((bin, index) => {
|
||||||
|
if (index < this.maxDataPoints) {
|
||||||
|
dataToRender[index] = bin.count;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (this.debug) {
|
||||||
|
console.log('render() dataToRender:', dataToRender, 'bins length:', this.bins.length);
|
||||||
|
}
|
||||||
|
maxValue = Math.max(...dataToRender);
|
||||||
|
minValue = Math.min(...dataToRender);
|
||||||
|
valueRange = maxValue - minValue;
|
||||||
|
} else {
|
||||||
|
// Legacy mode: render individual values
|
||||||
|
if (this.data.length === 0) {
|
||||||
|
this.container.textContent = 'No data yet. Click Start to begin.';
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
dataToRender = this.data;
|
||||||
|
maxValue = Math.max(...this.data);
|
||||||
|
minValue = Math.min(...this.data);
|
||||||
|
valueRange = maxValue - minValue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let output = '';
|
||||||
|
const scale = this.maxHeight;
|
||||||
|
|
||||||
|
// Calculate scaling factor: each X represents at least 1 count
|
||||||
|
const maxCount = Math.max(...dataToRender);
|
||||||
|
const scaleFactor = Math.max(1, Math.ceil(maxCount / scale)); // 1 X = scaleFactor counts
|
||||||
|
const scaledMax = Math.ceil(maxCount / scaleFactor) * scaleFactor;
|
||||||
|
|
||||||
|
// Calculate Y-axis label width (for vertical text)
|
||||||
|
const yLabelWidth = this.yAxisLabel ? 2 : 0;
|
||||||
|
const yAxisPadding = this.yAxisLabel ? ' ' : '';
|
||||||
|
|
||||||
|
// Add title if provided (centered)
|
||||||
|
if (this.title) {
|
||||||
|
// const chartWidth = 4 + this.maxDataPoints * 2; // Y-axis numbers + data columns // TEMP: commented for no-space test
|
||||||
|
const chartWidth = 4 + this.maxDataPoints; // Y-axis numbers + data columns // TEMP: adjusted for no-space columns
|
||||||
|
const titlePadding = Math.floor((chartWidth - this.title.length) / 2);
|
||||||
|
output += yAxisPadding + ' '.repeat(Math.max(0, titlePadding)) + this.title + '\n\n';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Draw from top to bottom
|
||||||
|
for (let row = scale; row > 0; row--) {
|
||||||
|
let line = '';
|
||||||
|
|
||||||
|
// Add vertical Y-axis label character
|
||||||
|
if (this.yAxisLabel) {
|
||||||
|
const L = this.yAxisLabel.length;
|
||||||
|
const startRow = Math.floor((scale - L) / 2) + 1;
|
||||||
|
const relativeRow = scale - row + 1; // 1 at top, scale at bottom
|
||||||
|
if (relativeRow >= startRow && relativeRow < startRow + L) {
|
||||||
|
const labelIndex = relativeRow - startRow;
|
||||||
|
line += this.yAxisLabel[labelIndex] + ' ';
|
||||||
|
} else {
|
||||||
|
line += ' ';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate the actual count value this row represents (1 at bottom, increasing upward)
|
||||||
|
const rowCount = (row - 1) * scaleFactor + 1;
|
||||||
|
|
||||||
|
// Add Y-axis label (show actual count values)
|
||||||
|
line += String(rowCount).padStart(3, ' ') + ' |';
|
||||||
|
|
||||||
|
// Draw each column
|
||||||
|
for (let i = 0; i < dataToRender.length; i++) {
|
||||||
|
const count = dataToRender[i];
|
||||||
|
const scaledHeight = Math.ceil(count / scaleFactor);
|
||||||
|
|
||||||
|
if (scaledHeight >= row) {
|
||||||
|
// line += ' X'; // TEMP: commented out space between columns
|
||||||
|
line += 'X'; // TEMP: no space between columns
|
||||||
|
} else {
|
||||||
|
// line += ' '; // TEMP: commented out space between columns
|
||||||
|
line += ' '; // TEMP: single space for empty columns
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
output += line + '\n';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Draw X-axis
|
||||||
|
// output += yAxisPadding + ' +' + '-'.repeat(this.maxDataPoints * 2) + '\n'; // TEMP: commented out for no-space test
|
||||||
|
output += yAxisPadding + ' +' + '-'.repeat(this.maxDataPoints) + '\n'; // TEMP: back to original length
|
||||||
|
|
||||||
|
// Draw X-axis labels based on mode and format
|
||||||
|
let xAxisLabels = yAxisPadding + ' '; // Initial padding to align with X-axis
|
||||||
|
|
||||||
|
// Determine label interval (every 5 columns)
|
||||||
|
const labelInterval = 5;
|
||||||
|
|
||||||
|
// Generate all labels first and store in array
|
||||||
|
let labels = [];
|
||||||
|
for (let i = 0; i < this.maxDataPoints; i++) {
|
||||||
|
if (i % labelInterval === 0) {
|
||||||
|
let label = '';
|
||||||
|
if (this.useBinMode) {
|
||||||
|
// For bin mode, show labels for all possible positions
|
||||||
|
// i=0 is leftmost (most recent), i=maxDataPoints-1 is rightmost (oldest)
|
||||||
|
const elapsedSec = (i * this.binDuration) / 1000;
|
||||||
|
// Format with appropriate precision for sub-second bins
|
||||||
|
if (this.binDuration < 1000) {
|
||||||
|
// Show decimal seconds for sub-second bins
|
||||||
|
label = elapsedSec.toFixed(1) + 's';
|
||||||
|
} else {
|
||||||
|
// Show whole seconds for 1+ second bins
|
||||||
|
label = String(Math.round(elapsedSec)) + 's';
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// For legacy mode, show data point numbers
|
||||||
|
const startIndex = Math.max(1, this.totalDataPoints - this.maxDataPoints + 1);
|
||||||
|
label = String(startIndex + i);
|
||||||
|
}
|
||||||
|
labels.push(label);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build the label string with calculated spacing
|
||||||
|
for (let i = 0; i < labels.length; i++) {
|
||||||
|
const label = labels[i];
|
||||||
|
xAxisLabels += label;
|
||||||
|
|
||||||
|
// Add spacing: labelInterval - label.length (except for last label)
|
||||||
|
if (i < labels.length - 1) {
|
||||||
|
const spacing = labelInterval - label.length;
|
||||||
|
xAxisLabels += ' '.repeat(spacing);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure the label line extends to match the X-axis dash line length
|
||||||
|
// The dash line is this.maxDataPoints characters long, starting after " +"
|
||||||
|
const dashLineLength = this.maxDataPoints;
|
||||||
|
const minLabelLineLength = yAxisPadding.length + 4 + dashLineLength; // 4 for " "
|
||||||
|
if (xAxisLabels.length < minLabelLineLength) {
|
||||||
|
xAxisLabels += ' '.repeat(minLabelLineLength - xAxisLabels.length);
|
||||||
|
}
|
||||||
|
output += xAxisLabels + '\n';
|
||||||
|
|
||||||
|
// Add X-axis label if provided
|
||||||
|
if (this.xAxisLabel) {
|
||||||
|
// const labelPadding = Math.floor((this.maxDataPoints * 2 - this.xAxisLabel.length) / 2); // TEMP: commented for no-space test
|
||||||
|
const labelPadding = Math.floor((this.maxDataPoints - this.xAxisLabel.length) / 2); // TEMP: adjusted for no-space columns
|
||||||
|
output += '\n' + yAxisPadding + ' ' + ' '.repeat(Math.max(0, labelPadding)) + this.xAxisLabel + '\n';
|
||||||
|
}
|
||||||
|
|
||||||
|
this.container.textContent = output;
|
||||||
|
|
||||||
|
// Adjust font size to fit width (only once at initialization)
|
||||||
|
if (this.autoFitWidth) {
|
||||||
|
this.adjustFontSize();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update the external info display
|
||||||
|
if (this.useBinMode) {
|
||||||
|
const binCounts = this.bins.map(bin => bin.count);
|
||||||
|
const scaleFactor = Math.max(1, Math.ceil(maxValue / scale));
|
||||||
|
document.getElementById('values').textContent = `[${dataToRender.join(', ')}]`;
|
||||||
|
document.getElementById('max-value').textContent = maxValue;
|
||||||
|
document.getElementById('scale').textContent = `Min: ${minValue}, Max: ${maxValue}, 1X=${scaleFactor} counts`;
|
||||||
|
} else {
|
||||||
|
document.getElementById('values').textContent = `[${this.data.join(', ')}]`;
|
||||||
|
document.getElementById('max-value').textContent = maxValue;
|
||||||
|
document.getElementById('scale').textContent = `Min: ${minValue}, Max: ${maxValue}, Height: ${scale}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update the info display
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
updateInfo() {
|
||||||
|
if (this.useBinMode) {
|
||||||
|
const totalCount = this.bins.reduce((sum, bin) => sum + bin.count, 0);
|
||||||
|
document.getElementById('count').textContent = totalCount;
|
||||||
|
} else {
|
||||||
|
document.getElementById('count').textContent = this.data.length;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize the bin system
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
initializeBins() {
|
||||||
|
this.bins = [];
|
||||||
|
this.currentBinIndex = -1;
|
||||||
|
this.binStartTime = null;
|
||||||
|
this.chartStartTime = Date.now();
|
||||||
|
|
||||||
|
// Create first bin
|
||||||
|
this.rotateBin();
|
||||||
|
|
||||||
|
// Set up automatic bin rotation check
|
||||||
|
this.binCheckInterval = setInterval(() => {
|
||||||
|
this.checkBinRotation();
|
||||||
|
}, 100); // Check every 100ms for responsiveness
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if current bin should rotate and create new bin if needed
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
checkBinRotation() {
|
||||||
|
if (!this.useBinMode || !this.binStartTime) return;
|
||||||
|
|
||||||
|
const now = Date.now();
|
||||||
|
if ((now - this.binStartTime) >= this.binDuration) {
|
||||||
|
this.rotateBin();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Rotate to a new bin, finalizing the current one
|
||||||
|
*/
|
||||||
|
rotateBin() {
|
||||||
|
// Finalize current bin if it exists
|
||||||
|
if (this.currentBinIndex >= 0) {
|
||||||
|
this.bins[this.currentBinIndex].isActive = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create new bin
|
||||||
|
const newBin = {
|
||||||
|
startTime: Date.now(),
|
||||||
|
count: 0,
|
||||||
|
isActive: true
|
||||||
|
};
|
||||||
|
|
||||||
|
this.bins.push(newBin);
|
||||||
|
this.currentBinIndex = this.bins.length - 1;
|
||||||
|
this.binStartTime = newBin.startTime;
|
||||||
|
|
||||||
|
// Keep only the most recent bins
|
||||||
|
if (this.bins.length > this.maxDataPoints) {
|
||||||
|
this.bins.shift();
|
||||||
|
this.currentBinIndex--;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure currentBinIndex points to the last bin (the active one)
|
||||||
|
this.currentBinIndex = this.bins.length - 1;
|
||||||
|
|
||||||
|
// Force a render to update the display immediately
|
||||||
|
this.render();
|
||||||
|
this.updateInfo();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format X-axis label for a bin based on the configured format
|
||||||
|
* @param {number} binIndex - Index of the bin
|
||||||
|
* @returns {string} Formatted label
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
formatBinLabel(binIndex) {
|
||||||
|
const bin = this.bins[binIndex];
|
||||||
|
if (!bin) return ' ';
|
||||||
|
|
||||||
|
switch (this.xAxisLabelFormat) {
|
||||||
|
case 'bins':
|
||||||
|
return String(binIndex + 1).padStart(2, ' ');
|
||||||
|
|
||||||
|
case 'timestamps':
|
||||||
|
const time = new Date(bin.startTime);
|
||||||
|
return time.toLocaleTimeString('en-US', {
|
||||||
|
hour12: false,
|
||||||
|
hour: '2-digit',
|
||||||
|
minute: '2-digit',
|
||||||
|
second: '2-digit'
|
||||||
|
}).replace(/:/g, '');
|
||||||
|
|
||||||
|
case 'ranges':
|
||||||
|
const startSec = Math.floor((bin.startTime - this.chartStartTime) / 1000);
|
||||||
|
const endSec = startSec + Math.floor(this.binDuration / 1000);
|
||||||
|
return `${startSec}-${endSec}`;
|
||||||
|
|
||||||
|
case 'elapsed':
|
||||||
|
default:
|
||||||
|
// For elapsed time, always show time relative to the first bin (index 0)
|
||||||
|
// This keeps the leftmost label as 0s and increases to the right
|
||||||
|
const firstBinTime = this.bins[0] ? this.bins[0].startTime : this.chartStartTime;
|
||||||
|
const elapsedSec = Math.floor((bin.startTime - firstBinTime) / 1000);
|
||||||
|
return String(elapsedSec).padStart(2, ' ') + 's';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
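The JSDoc above documents the public surface of ASCIIBarChart (constructor options, `addValue`, `clear`). A minimal usage sketch follows; the container id, option values, and the timer feed are illustrative assumptions, not taken from the admin page. Note that `render()` and `updateInfo()` also write into elements with ids `values`, `max-value`, `scale`, and `count`, so the hosting page needs those as well.

```js
// Minimal usage sketch for ASCIIBarChart (ids and option values are illustrative).
// The host page must provide #chart plus #values, #max-value, #scale and #count,
// because render()/updateInfo() write into those elements directly.
const chart = new ASCIIBarChart('chart', {
    maxHeight: 15,          // rows of X characters
    maxDataPoints: 30,      // columns before old bins scroll off
    title: 'EVENTS PER SECOND',
    yAxisLabel: 'EVENTS',
    xAxisLabel: 'SECONDS AGO',
    useBinMode: true,       // aggregate values into fixed-duration time bins
    binDuration: 1000       // 1-second bins
});

// Each call adds to the currently active bin; bin rotation happens automatically
// on the 100 ms interval started by initializeBins().
setInterval(() => {
    chart.addValue(1);      // e.g. one incoming relay event
}, 250);

// chart.clear() resets the bins and restarts the elapsed-time axis.
```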
@@ -1,616 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
set -e
|
|
||||||
|
|
||||||
# Colors for output
|
|
||||||
RED='\033[0;31m'
|
|
||||||
GREEN='\033[0;32m'
|
|
||||||
YELLOW='\033[1;33m'
|
|
||||||
BLUE='\033[0;34m'
|
|
||||||
NC='\033[0m'
|
|
||||||
|
|
||||||
print_status() { echo -e "${BLUE}[INFO]${NC} $1"; }
|
|
||||||
print_success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; }
|
|
||||||
print_warning() { echo -e "${YELLOW}[WARNING]${NC} $1"; }
|
|
||||||
print_error() { echo -e "${RED}[ERROR]${NC} $1"; }
|
|
||||||
|
|
||||||
# Global variables
|
|
||||||
COMMIT_MESSAGE=""
|
|
||||||
RELEASE_MODE=false
|
|
||||||
|
|
||||||
show_usage() {
|
|
||||||
echo "C-Relay Build and Push Script"
|
|
||||||
echo ""
|
|
||||||
echo "Usage:"
|
|
||||||
echo " $0 \"commit message\" - Default: compile, increment patch, commit & push"
|
|
||||||
echo " $0 -r \"commit message\" - Release: compile x86+arm64, increment minor, create release"
|
|
||||||
echo ""
|
|
||||||
echo "Examples:"
|
|
||||||
echo " $0 \"Fixed event validation bug\""
|
|
||||||
echo " $0 --release \"Major release with new features\""
|
|
||||||
echo ""
|
|
||||||
echo "Default Mode (patch increment):"
|
|
||||||
echo " - Compile C-Relay"
|
|
||||||
echo " - Increment patch version (v1.2.3 → v1.2.4)"
|
|
||||||
echo " - Git add, commit with message, and push"
|
|
||||||
echo ""
|
|
||||||
echo "Release Mode (-r flag):"
|
|
||||||
echo " - Compile C-Relay for x86_64 and arm64 (dynamic and static versions)"
|
|
||||||
echo " - Increment minor version, zero patch (v1.2.3 → v1.3.0)"
|
|
||||||
echo " - Git add, commit, push, and create Gitea release"
|
|
||||||
echo ""
|
|
||||||
echo "Requirements for Release Mode:"
|
|
||||||
echo " - For ARM64 builds: make install-arm64-deps (optional - will build x86_64 only if missing)"
|
|
||||||
echo " - For static builds: sudo apt-get install musl-dev libcap-dev libuv1-dev libev-dev"
|
|
||||||
echo " - Gitea token in ~/.gitea_token for release uploads"
|
|
||||||
}
|
|
||||||
|
|
||||||
# Parse command line arguments
|
|
||||||
while [[ $# -gt 0 ]]; do
|
|
||||||
case $1 in
|
|
||||||
-r|--release)
|
|
||||||
RELEASE_MODE=true
|
|
||||||
shift
|
|
||||||
;;
|
|
||||||
-h|--help)
|
|
||||||
show_usage
|
|
||||||
exit 0
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
# First non-flag argument is the commit message
|
|
||||||
if [[ -z "$COMMIT_MESSAGE" ]]; then
|
|
||||||
COMMIT_MESSAGE="$1"
|
|
||||||
fi
|
|
||||||
shift
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
done
|
|
||||||
|
|
||||||
# Validate inputs
|
|
||||||
if [[ -z "$COMMIT_MESSAGE" ]]; then
|
|
||||||
print_error "Commit message is required"
|
|
||||||
echo ""
|
|
||||||
show_usage
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Check if we're in a git repository
|
|
||||||
check_git_repo() {
|
|
||||||
if ! git rev-parse --git-dir > /dev/null 2>&1; then
|
|
||||||
print_error "Not in a git repository"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# Function to get current version and increment appropriately
|
|
||||||
increment_version() {
|
|
||||||
local increment_type="$1" # "patch" or "minor"
|
|
||||||
|
|
||||||
print_status "Getting current version..."
|
|
||||||
|
|
||||||
# Get the highest version tag (not chronologically latest)
|
|
||||||
LATEST_TAG=$(git tag -l 'v*.*.*' | sort -V | tail -n 1 || echo "")
|
|
||||||
if [[ -z "$LATEST_TAG" ]]; then
|
|
||||||
LATEST_TAG="v0.0.0"
|
|
||||||
print_warning "No version tags found, starting from $LATEST_TAG"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Extract version components (remove 'v' prefix)
|
|
||||||
VERSION=${LATEST_TAG#v}
|
|
||||||
|
|
||||||
# Parse major.minor.patch using regex
|
|
||||||
if [[ $VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
|
|
||||||
MAJOR=${BASH_REMATCH[1]}
|
|
||||||
MINOR=${BASH_REMATCH[2]}
|
|
||||||
PATCH=${BASH_REMATCH[3]}
|
|
||||||
else
|
|
||||||
print_error "Invalid version format in tag: $LATEST_TAG"
|
|
||||||
print_error "Expected format: v0.1.0"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Increment version based on type
|
|
||||||
if [[ "$increment_type" == "minor" ]]; then
|
|
||||||
# Minor release: increment minor, zero patch
|
|
||||||
NEW_MINOR=$((MINOR + 1))
|
|
||||||
NEW_PATCH=0
|
|
||||||
NEW_VERSION="v${MAJOR}.${NEW_MINOR}.${NEW_PATCH}"
|
|
||||||
print_status "Release mode: incrementing minor version"
|
|
||||||
else
|
|
||||||
# Default: increment patch
|
|
||||||
NEW_PATCH=$((PATCH + 1))
|
|
||||||
NEW_VERSION="v${MAJOR}.${MINOR}.${NEW_PATCH}"
|
|
||||||
print_status "Default mode: incrementing patch version"
|
|
||||||
fi
|
|
||||||
|
|
||||||
print_status "Current version: $LATEST_TAG"
|
|
||||||
print_status "New version: $NEW_VERSION"
|
|
||||||
|
|
||||||
# Export for use in other functions
|
|
||||||
export NEW_VERSION
|
|
||||||
}
|
|
||||||
|
|
||||||
# Function to compile the C-Relay project
|
|
||||||
compile_project() {
|
|
||||||
print_status "Compiling C-Relay..."
|
|
||||||
|
|
||||||
# Clean previous build
|
|
||||||
if make clean > /dev/null 2>&1; then
|
|
||||||
print_success "Cleaned previous build"
|
|
||||||
else
|
|
||||||
print_warning "Clean failed or no Makefile found"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Force regenerate main.h to pick up new tags
|
|
||||||
if make force-version > /dev/null 2>&1; then
|
|
||||||
print_success "Regenerated main.h"
|
|
||||||
else
|
|
||||||
print_warning "Failed to regenerate main.h"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Compile the project
|
|
||||||
if make > /dev/null 2>&1; then
|
|
||||||
print_success "C-Relay compiled successfully"
|
|
||||||
else
|
|
||||||
print_error "Compilation failed"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# Function to build release binaries
|
|
||||||
build_release_binaries() {
|
|
||||||
print_status "Building release binaries..."
|
|
||||||
|
|
||||||
# Build x86_64 version
|
|
||||||
print_status "Building x86_64 version..."
|
|
||||||
make clean > /dev/null 2>&1
|
|
||||||
if make x86 > /dev/null 2>&1; then
|
|
||||||
if [[ -f "build/c_relay_x86" ]]; then
|
|
||||||
cp build/c_relay_x86 c-relay-x86_64
|
|
||||||
print_success "x86_64 binary created: c-relay-x86_64"
|
|
||||||
else
|
|
||||||
print_error "x86_64 binary not found after compilation"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
print_error "x86_64 build failed"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Try to build ARM64 version
|
|
||||||
print_status "Attempting ARM64 build..."
|
|
||||||
make clean > /dev/null 2>&1
|
|
||||||
if make arm64 > /dev/null 2>&1; then
|
|
||||||
if [[ -f "build/c_relay_arm64" ]]; then
|
|
||||||
cp build/c_relay_arm64 c-relay-arm64
|
|
||||||
print_success "ARM64 binary created: c-relay-arm64"
|
|
||||||
else
|
|
||||||
print_warning "ARM64 binary not found after compilation"
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
print_warning "ARM64 build failed - ARM64 cross-compilation not properly set up"
|
|
||||||
print_status "Only x86_64 binary will be included in release"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Build static x86_64 version
|
|
||||||
print_status "Building static x86_64 version..."
|
|
||||||
make clean > /dev/null 2>&1
|
|
||||||
if make static-musl-x86_64 > /dev/null 2>&1; then
|
|
||||||
if [[ -f "build/c_relay_static_musl_x86_64" ]]; then
|
|
||||||
cp build/c_relay_static_musl_x86_64 c-relay-static-x86_64
|
|
||||||
print_success "Static x86_64 binary created: c-relay-static-x86_64"
|
|
||||||
else
|
|
||||||
print_warning "Static x86_64 binary not found after compilation"
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
print_warning "Static x86_64 build failed - MUSL development packages may not be installed"
|
|
||||||
print_status "Run 'sudo apt-get install musl-dev libcap-dev libuv1-dev libev-dev' to enable static builds"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Try to build static ARM64 version
|
|
||||||
print_status "Attempting static ARM64 build..."
|
|
||||||
make clean > /dev/null 2>&1
|
|
||||||
if make static-musl-arm64 > /dev/null 2>&1; then
|
|
||||||
if [[ -f "build/c_relay_static_musl_arm64" ]]; then
|
|
||||||
cp build/c_relay_static_musl_arm64 c-relay-static-arm64
|
|
||||||
print_success "Static ARM64 binary created: c-relay-static-arm64"
|
|
||||||
else
|
|
||||||
print_warning "Static ARM64 binary not found after compilation"
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
print_warning "Static ARM64 build failed - ARM64 cross-compilation or MUSL ARM64 packages not set up"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Restore normal build
|
|
||||||
make clean > /dev/null 2>&1
|
|
||||||
make > /dev/null 2>&1
|
|
||||||
}
|
|
||||||
|
|
||||||
# Function to commit and push changes
|
|
||||||
git_commit_and_push() {
|
|
||||||
print_status "Preparing git commit..."
|
|
||||||
|
|
||||||
# Stage all changes
|
|
||||||
if git add . > /dev/null 2>&1; then
|
|
||||||
print_success "Staged all changes"
|
|
||||||
else
|
|
||||||
print_error "Failed to stage changes"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Check if there are changes to commit
|
|
||||||
if git diff --staged --quiet; then
|
|
||||||
print_warning "No changes to commit"
|
|
||||||
else
|
|
||||||
# Commit changes
|
|
||||||
if git commit -m "$NEW_VERSION - $COMMIT_MESSAGE" > /dev/null 2>&1; then
|
|
||||||
print_success "Committed changes"
|
|
||||||
else
|
|
||||||
print_error "Failed to commit changes"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Create new git tag
|
|
||||||
if git tag "$NEW_VERSION" > /dev/null 2>&1; then
|
|
||||||
print_success "Created tag: $NEW_VERSION"
|
|
||||||
else
|
|
||||||
print_warning "Tag $NEW_VERSION already exists"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Push changes and tags
|
|
||||||
print_status "Pushing to remote repository..."
|
|
||||||
if git push > /dev/null 2>&1; then
|
|
||||||
print_success "Pushed changes"
|
|
||||||
else
|
|
||||||
print_error "Failed to push changes"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Push only the new tag to avoid conflicts with existing tags
|
|
||||||
if git push origin "$NEW_VERSION" > /dev/null 2>&1; then
|
|
||||||
print_success "Pushed tag: $NEW_VERSION"
|
|
||||||
else
|
|
||||||
print_warning "Tag push failed, trying force push..."
|
|
||||||
if git push --force origin "$NEW_VERSION" > /dev/null 2>&1; then
|
|
||||||
print_success "Force-pushed updated tag: $NEW_VERSION"
|
|
||||||
else
|
|
||||||
print_error "Failed to push tag: $NEW_VERSION"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# Function to commit and push changes without creating a tag (tag already created)
|
|
||||||
git_commit_and_push_no_tag() {
|
|
||||||
print_status "Preparing git commit..."
|
|
||||||
|
|
||||||
# Stage all changes
|
|
||||||
if git add . > /dev/null 2>&1; then
|
|
||||||
print_success "Staged all changes"
|
|
||||||
else
|
|
||||||
print_error "Failed to stage changes"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Check if there are changes to commit
|
|
||||||
if git diff --staged --quiet; then
|
|
||||||
print_warning "No changes to commit"
|
|
||||||
else
|
|
||||||
# Commit changes
|
|
||||||
if git commit -m "$NEW_VERSION - $COMMIT_MESSAGE" > /dev/null 2>&1; then
|
|
||||||
print_success "Committed changes"
|
|
||||||
else
|
|
||||||
print_error "Failed to commit changes"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Push changes and tags
|
|
||||||
print_status "Pushing to remote repository..."
|
|
||||||
if git push > /dev/null 2>&1; then
|
|
||||||
print_success "Pushed changes"
|
|
||||||
else
|
|
||||||
print_error "Failed to push changes"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Push only the new tag to avoid conflicts with existing tags
|
|
||||||
if git push origin "$NEW_VERSION" > /dev/null 2>&1; then
|
|
||||||
print_success "Pushed tag: $NEW_VERSION"
|
|
||||||
else
|
|
||||||
print_warning "Tag push failed, trying force push..."
|
|
||||||
if git push --force origin "$NEW_VERSION" > /dev/null 2>&1; then
|
|
||||||
print_success "Force-pushed updated tag: $NEW_VERSION"
|
|
||||||
else
|
|
||||||
print_error "Failed to push tag: $NEW_VERSION"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# Function to create Gitea release
|
|
||||||
create_gitea_release() {
|
|
||||||
print_status "Creating Gitea release..."
|
|
||||||
|
|
||||||
# Check for Gitea token
|
|
||||||
if [[ ! -f "$HOME/.gitea_token" ]]; then
|
|
||||||
print_warning "No ~/.gitea_token found. Skipping release creation."
|
|
||||||
print_warning "Create ~/.gitea_token with your Gitea access token to enable releases."
|
|
||||||
return 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
local token=$(cat "$HOME/.gitea_token" | tr -d '\n\r')
|
|
||||||
local api_url="https://git.laantungir.net/api/v1/repos/laantungir/c-relay"
|
|
||||||
|
|
||||||
# Create release
|
|
||||||
print_status "Creating release $NEW_VERSION..."
|
|
||||||
local response=$(curl -s -X POST "$api_url/releases" \
|
|
||||||
-H "Authorization: token $token" \
|
|
||||||
-H "Content-Type: application/json" \
|
|
||||||
-d "{\"tag_name\": \"$NEW_VERSION\", \"name\": \"$NEW_VERSION\", \"body\": \"$COMMIT_MESSAGE\"}")
|
|
||||||
|
|
||||||
local upload_result=false
|
|
||||||
|
|
||||||
if echo "$response" | grep -q '"id"'; then
|
|
||||||
print_success "Created release $NEW_VERSION"
|
|
||||||
if upload_release_binaries "$api_url" "$token"; then
|
|
||||||
upload_result=true
|
|
||||||
fi
|
|
||||||
elif echo "$response" | grep -q "already exists"; then
|
|
||||||
print_warning "Release $NEW_VERSION already exists"
|
|
||||||
if upload_release_binaries "$api_url" "$token"; then
|
|
||||||
upload_result=true
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
print_error "Failed to create release $NEW_VERSION"
|
|
||||||
print_error "Response: $response"
|
|
||||||
|
|
||||||
# Try to check if the release exists anyway
|
|
||||||
print_status "Checking if release exists..."
|
|
||||||
local check_response=$(curl -s -H "Authorization: token $token" "$api_url/releases/tags/$NEW_VERSION")
|
|
||||||
if echo "$check_response" | grep -q '"id"'; then
|
|
||||||
print_warning "Release exists but creation response was unexpected"
|
|
||||||
if upload_release_binaries "$api_url" "$token"; then
|
|
||||||
upload_result=true
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
print_error "Release does not exist and creation failed"
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Return based on upload success
|
|
||||||
if [[ "$upload_result" == true ]]; then
|
|
||||||
return 0
|
|
||||||
else
|
|
||||||
print_error "Binary upload failed"
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# Function to upload release binaries
|
|
||||||
upload_release_binaries() {
|
|
||||||
local api_url="$1"
|
|
||||||
local token="$2"
|
|
||||||
local upload_success=true
|
|
||||||
|
|
||||||
# Get release ID with more robust parsing
|
|
||||||
print_status "Getting release ID for $NEW_VERSION..."
|
|
||||||
local response=$(curl -s -H "Authorization: token $token" "$api_url/releases/tags/$NEW_VERSION")
|
|
||||||
local release_id=$(echo "$response" | grep -o '"id":[0-9]*' | head -n1 | cut -d: -f2)
|
|
||||||
|
|
||||||
if [[ -z "$release_id" ]]; then
|
|
||||||
print_error "Could not get release ID for $NEW_VERSION"
|
|
||||||
print_error "API Response: $response"
|
|
||||||
|
|
||||||
# Try to list all releases to debug
|
|
||||||
print_status "Available releases:"
|
|
||||||
curl -s -H "Authorization: token $token" "$api_url/releases" | grep -o '"tag_name":"[^"]*"' | head -5
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
print_success "Found release ID: $release_id"
|
|
||||||
|
|
||||||
# Upload x86_64 binary
|
|
||||||
if [[ -f "c-relay-x86_64" ]]; then
|
|
||||||
print_status "Uploading x86_64 binary..."
|
|
||||||
local upload_response=$(curl -s -w "\n%{http_code}" -X POST "$api_url/releases/$release_id/assets" \
|
|
||||||
-H "Authorization: token $token" \
|
|
||||||
-F "attachment=@c-relay-x86_64;filename=c-relay-${NEW_VERSION}-linux-x86_64")
|
|
||||||
|
|
||||||
local http_code=$(echo "$upload_response" | tail -n1)
|
|
||||||
local response_body=$(echo "$upload_response" | head -n -1)
|
|
||||||
|
|
||||||
if [[ "$http_code" == "201" ]]; then
|
|
||||||
print_success "Uploaded x86_64 binary successfully"
|
|
||||||
else
|
|
||||||
print_error "Failed to upload x86_64 binary (HTTP $http_code)"
|
|
||||||
print_error "Response: $response_body"
|
|
||||||
upload_success=false
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
print_warning "x86_64 binary not found: c-relay-x86_64"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Upload ARM64 binary
|
|
||||||
if [[ -f "c-relay-arm64" ]]; then
|
|
||||||
print_status "Uploading ARM64 binary..."
|
|
||||||
local upload_response=$(curl -s -w "\n%{http_code}" -X POST "$api_url/releases/$release_id/assets" \
|
|
||||||
-H "Authorization: token $token" \
|
|
||||||
-F "attachment=@c-relay-arm64;filename=c-relay-${NEW_VERSION}-linux-arm64")
|
|
||||||
|
|
||||||
local http_code=$(echo "$upload_response" | tail -n1)
|
|
||||||
local response_body=$(echo "$upload_response" | head -n -1)
|
|
||||||
|
|
||||||
if [[ "$http_code" == "201" ]]; then
|
|
||||||
print_success "Uploaded ARM64 binary successfully"
|
|
||||||
else
|
|
||||||
print_error "Failed to upload ARM64 binary (HTTP $http_code)"
|
|
||||||
print_error "Response: $response_body"
|
|
||||||
upload_success=false
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
print_warning "ARM64 binary not found: c-relay-arm64"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Upload static x86_64 binary
|
|
||||||
if [[ -f "c-relay-static-x86_64" ]]; then
|
|
||||||
print_status "Uploading static x86_64 binary..."
|
|
||||||
local upload_response=$(curl -s -w "\n%{http_code}" -X POST "$api_url/releases/$release_id/assets" \
|
|
||||||
-H "Authorization: token $token" \
|
|
||||||
-F "attachment=@c-relay-static-x86_64;filename=c-relay-${NEW_VERSION}-linux-x86_64-static")
|
|
||||||
|
|
||||||
local http_code=$(echo "$upload_response" | tail -n1)
|
|
||||||
local response_body=$(echo "$upload_response" | head -n -1)
|
|
||||||
|
|
||||||
if [[ "$http_code" == "201" ]]; then
|
|
||||||
print_success "Uploaded static x86_64 binary successfully"
|
|
||||||
else
|
|
||||||
print_error "Failed to upload static x86_64 binary (HTTP $http_code)"
|
|
||||||
print_error "Response: $response_body"
|
|
||||||
upload_success=false
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
print_warning "Static x86_64 binary not found: c-relay-static-x86_64"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Upload static ARM64 binary
|
|
||||||
if [[ -f "c-relay-static-arm64" ]]; then
|
|
||||||
print_status "Uploading static ARM64 binary..."
|
|
||||||
local upload_response=$(curl -s -w "\n%{http_code}" -X POST "$api_url/releases/$release_id/assets" \
|
|
||||||
-H "Authorization: token $token" \
|
|
||||||
-F "attachment=@c-relay-static-arm64;filename=c-relay-${NEW_VERSION}-linux-arm64-static")
|
|
||||||
|
|
||||||
local http_code=$(echo "$upload_response" | tail -n1)
|
|
||||||
local response_body=$(echo "$upload_response" | head -n -1)
|
|
||||||
|
|
||||||
if [[ "$http_code" == "201" ]]; then
|
|
||||||
print_success "Uploaded static ARM64 binary successfully"
|
|
||||||
else
|
|
||||||
print_error "Failed to upload static ARM64 binary (HTTP $http_code)"
|
|
||||||
print_error "Response: $response_body"
|
|
||||||
upload_success=false
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
print_warning "Static ARM64 binary not found: c-relay-static-arm64"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Return success/failure status
|
|
||||||
if [[ "$upload_success" == true ]]; then
|
|
||||||
return 0
|
|
||||||
else
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# Function to clean up release binaries
|
|
||||||
cleanup_release_binaries() {
|
|
||||||
local force_cleanup="$1" # Optional parameter to force cleanup even on failure
|
|
||||||
|
|
||||||
if [[ "$force_cleanup" == "force" ]] || [[ "$upload_success" == true ]]; then
|
|
||||||
if [[ -f "c-relay-x86_64" ]]; then
|
|
||||||
rm -f c-relay-x86_64
|
|
||||||
print_status "Cleaned up x86_64 binary"
|
|
||||||
fi
|
|
||||||
if [[ -f "c-relay-arm64" ]]; then
|
|
||||||
rm -f c-relay-arm64
|
|
||||||
print_status "Cleaned up ARM64 binary"
|
|
||||||
fi
|
|
||||||
if [[ -f "c-relay-static-x86_64" ]]; then
|
|
||||||
rm -f c-relay-static-x86_64
|
|
||||||
print_status "Cleaned up static x86_64 binary"
|
|
||||||
fi
|
|
||||||
if [[ -f "c-relay-static-arm64" ]]; then
|
|
||||||
rm -f c-relay-static-arm64
|
|
||||||
print_status "Cleaned up static ARM64 binary"
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
print_warning "Keeping binary files due to upload failures"
|
|
||||||
print_status "Files available for manual upload:"
|
|
||||||
if [[ -f "c-relay-x86_64" ]]; then
|
|
||||||
print_status " - c-relay-x86_64"
|
|
||||||
fi
|
|
||||||
if [[ -f "c-relay-arm64" ]]; then
|
|
||||||
print_status " - c-relay-arm64"
|
|
||||||
fi
|
|
||||||
if [[ -f "c-relay-static-x86_64" ]]; then
|
|
||||||
print_status " - c-relay-static-x86_64"
|
|
||||||
fi
|
|
||||||
if [[ -f "c-relay-static-arm64" ]]; then
|
|
||||||
print_status " - c-relay-static-arm64"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# Main execution
|
|
||||||
main() {
|
|
||||||
print_status "C-Relay Build and Push Script"
|
|
||||||
|
|
||||||
# Check prerequisites
|
|
||||||
check_git_repo
|
|
||||||
|
|
||||||
if [[ "$RELEASE_MODE" == true ]]; then
|
|
||||||
print_status "=== RELEASE MODE ==="
|
|
||||||
|
|
||||||
# Increment minor version for releases
|
|
||||||
increment_version "minor"
|
|
||||||
|
|
||||||
# Create new git tag BEFORE compilation so version.h picks it up
|
|
||||||
if git tag "$NEW_VERSION" > /dev/null 2>&1; then
|
|
||||||
print_success "Created tag: $NEW_VERSION"
|
|
||||||
else
|
|
||||||
print_warning "Tag $NEW_VERSION already exists, removing and recreating..."
|
|
||||||
git tag -d "$NEW_VERSION" > /dev/null 2>&1
|
|
||||||
git tag "$NEW_VERSION" > /dev/null 2>&1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Compile project first (will now pick up the new tag)
|
|
||||||
compile_project
|
|
||||||
|
|
||||||
# Build release binaries
|
|
||||||
build_release_binaries
|
|
||||||
|
|
||||||
# Commit and push (but skip tag creation since we already did it)
|
|
||||||
git_commit_and_push_no_tag
|
|
||||||
|
|
||||||
# Create Gitea release with binaries
|
|
||||||
if create_gitea_release; then
|
|
||||||
print_success "Release $NEW_VERSION completed successfully!"
|
|
||||||
print_status "Binaries uploaded to Gitea release"
|
|
||||||
upload_success=true
|
|
||||||
else
|
|
||||||
print_error "Release creation or binary upload failed"
|
|
||||||
upload_success=false
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Cleanup (only if upload was successful)
|
|
||||||
cleanup_release_binaries
|
|
||||||
|
|
||||||
else
|
|
||||||
print_status "=== DEFAULT MODE ==="
|
|
||||||
|
|
||||||
# Increment patch version for regular commits
|
|
||||||
increment_version "patch"
|
|
||||||
|
|
||||||
# Create new git tag BEFORE compilation so version.h picks it up
|
|
||||||
if git tag "$NEW_VERSION" > /dev/null 2>&1; then
|
|
||||||
print_success "Created tag: $NEW_VERSION"
|
|
||||||
else
|
|
||||||
print_warning "Tag $NEW_VERSION already exists, removing and recreating..."
|
|
||||||
git tag -d "$NEW_VERSION" > /dev/null 2>&1
|
|
||||||
git tag "$NEW_VERSION" > /dev/null 2>&1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Compile project (will now pick up the new tag)
|
|
||||||
compile_project
|
|
||||||
|
|
||||||
# Commit and push (but skip tag creation since we already did it)
|
|
||||||
git_commit_and_push_no_tag
|
|
||||||
|
|
||||||
print_success "Build and push completed successfully!"
|
|
||||||
print_status "Version $NEW_VERSION pushed to repository"
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# Execute main function
|
|
||||||
main
|
|
||||||
@@ -9,11 +9,21 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
BUILD_DIR="$SCRIPT_DIR/build"
DOCKERFILE="$SCRIPT_DIR/Dockerfile.alpine-musl"

echo "=========================================="
echo "C-Relay MUSL Static Binary Builder"
echo "=========================================="

# Parse command line arguments
DEBUG_BUILD=false
if [[ "$1" == "--debug" ]]; then
    DEBUG_BUILD=true
    echo "=========================================="
    echo "C-Relay MUSL Static Binary Builder (DEBUG MODE)"
    echo "=========================================="
else
    echo "=========================================="
    echo "C-Relay MUSL Static Binary Builder (PRODUCTION MODE)"
    echo "=========================================="
fi
echo "Project directory: $SCRIPT_DIR"
echo "Build directory: $BUILD_DIR"
echo "Debug build: $DEBUG_BUILD"
echo ""

# Create build directory
@@ -83,6 +93,7 @@ echo ""

$DOCKER_CMD build \
    --platform "$PLATFORM" \
    --build-arg DEBUG_BUILD=$DEBUG_BUILD \
    -f "$DOCKERFILE" \
    -t c-relay-musl-builder:latest \
    --progress=plain \
@@ -105,6 +116,7 @@ echo "=========================================="
# Build the builder stage to extract the binary
$DOCKER_CMD build \
    --platform "$PLATFORM" \
    --build-arg DEBUG_BUILD=$DEBUG_BUILD \
    --target builder \
    -f "$DOCKERFILE" \
    -t c-relay-static-builder-stage:latest \
@@ -179,11 +191,16 @@ echo "=========================================="
echo "Binary: $BUILD_DIR/$OUTPUT_NAME"
echo "Size: $(du -h "$BUILD_DIR/$OUTPUT_NAME" | cut -f1)"
echo "Platform: $PLATFORM"
if [ "$DEBUG_BUILD" = true ]; then
    echo "Build Type: DEBUG (with symbols, no optimization)"
else
    echo "Build Type: PRODUCTION (optimized, stripped)"
fi
if [ "$TRULY_STATIC" = true ]; then
    echo "Type: Fully static binary (Alpine MUSL-based)"
    echo "Linkage: Fully static binary (Alpine MUSL-based)"
    echo "Portability: Works on ANY Linux distribution"
else
    echo "Type: Static binary (may have minimal dependencies)"
    echo "Linkage: Static binary (may have minimal dependencies)"
fi
echo ""
echo "✓ Build complete!"
1  c_utils_lib  Submodule
Submodule c_utils_lib added at 442facd7e3
@@ -1,3 +1,19 @@
#!/bin/bash

# Copy the binary to the deployment location
cp build/c_relay_x86 ~/Storage/c_relay/crelay

# Copy the local service file to systemd
sudo cp systemd/c-relay-local.service /etc/systemd/system/

# Reload systemd daemon to pick up the new service
sudo systemctl daemon-reload

# Enable the service (if not already enabled)
sudo systemctl enable c-relay-local.service

# Restart the service
sudo systemctl restart c-relay-local.service

# Show service status
sudo systemctl status c-relay-local.service --no-pager -l
|
|||||||
457
docs/c_utils_lib_architecture.md
Normal file
@@ -0,0 +1,457 @@
# c_utils_lib Architecture Plan

## Overview

`c_utils_lib` is a standalone C utility library designed to provide reusable, general-purpose functions for C projects. It serves as a learning repository and a practical toolkit for common C programming tasks.

## Design Philosophy

1. **Zero External Dependencies**: Only standard C library dependencies
2. **Modular Design**: Each utility is independent and can be used separately
3. **Learning-Oriented**: Well-documented code suitable for learning C
4. **Production-Ready**: Battle-tested utilities from real projects
5. **Cross-Platform**: Works on Linux, macOS, and other POSIX systems

## Repository Structure

```
c_utils_lib/
├── README.md                 # Main documentation
├── LICENSE                   # MIT License
├── VERSION                   # Current version (e.g., v0.1.0)
├── build.sh                  # Build script
├── Makefile                  # Build system
├── .gitignore                # Git ignore rules
│
├── include/                  # Public headers
│   ├── c_utils.h             # Main header (includes all utilities)
│   ├── debug.h               # Debug/logging system
│   ├── version.h             # Version utilities
│   ├── string_utils.h        # String utilities (future)
│   └── memory_utils.h        # Memory utilities (future)
│
├── src/                      # Implementation files
│   ├── debug.c               # Debug system implementation
│   ├── version.c             # Version utilities implementation
│   ├── string_utils.c        # String utilities (future)
│   └── memory_utils.c        # Memory utilities (future)
│
├── examples/                 # Usage examples
│   ├── debug_example.c       # Debug system example
│   ├── version_example.c     # Version utilities example
│   └── Makefile              # Examples build system
│
├── tests/                    # Unit tests
│   ├── test_debug.c          # Debug system tests
│   ├── test_version.c        # Version utilities tests
│   ├── run_tests.sh          # Test runner
│   └── Makefile              # Tests build system
│
└── docs/                     # Additional documentation
    ├── API.md                # Complete API reference
    ├── INTEGRATION.md        # How to integrate into projects
    ├── VERSIONING.md         # Versioning system guide
    └── CONTRIBUTING.md       # Contribution guidelines
```

## Initial Utilities (v0.1.0)

### 1. Debug System (`debug.h`, `debug.c`)

**Purpose**: Unified logging and debugging system with configurable verbosity levels.

**Features**:
- 5 message levels (ERROR, WARN, INFO, DEBUG, TRACE) plus NONE to silence all output
- Timestamp formatting
- File/line information at TRACE level
- Macro-based API for zero-cost when disabled
- Thread-safe (future enhancement)

**API**:
```c
// Initialization
void debug_init(int level);

// Logging macros
DEBUG_ERROR(format, ...);
DEBUG_WARN(format, ...);
DEBUG_INFO(format, ...);
DEBUG_LOG(format, ...);
DEBUG_TRACE(format, ...);

// Global debug level
extern debug_level_t g_debug_level;
```

**Usage Example**:
```c
#include <c_utils/debug.h>

int main() {
    debug_init(DEBUG_LEVEL_INFO);
    DEBUG_INFO("Application started");
    DEBUG_ERROR("Critical error: %s", error_msg);
    return 0;
}
```

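The zero-cost claim above relies on each macro checking the global level before doing any formatting work. The actual contents of `debug.h` are not shown in this plan, so the following is only a minimal sketch of one common way to write such macros (the enum and macro names mirror the API above; the bodies and the GNU `##__VA_ARGS__` extension are assumptions, not the real implementation):

```c
/* Hypothetical sketch of debug.h internals - not the actual c_utils_lib code */
#include <stdio.h>

typedef enum {
    DEBUG_LEVEL_NONE = 0,
    DEBUG_LEVEL_ERROR,
    DEBUG_LEVEL_WARN,
    DEBUG_LEVEL_INFO,
    DEBUG_LEVEL_DEBUG,
    DEBUG_LEVEL_TRACE
} debug_level_t;

extern debug_level_t g_debug_level;

/* Level check happens first, so disabled messages cost one integer compare */
#define DEBUG_ERROR(fmt, ...) \
    do { if (g_debug_level >= DEBUG_LEVEL_ERROR) \
        fprintf(stderr, "[ERROR] " fmt "\n", ##__VA_ARGS__); } while (0)

#define DEBUG_TRACE(fmt, ...) \
    do { if (g_debug_level >= DEBUG_LEVEL_TRACE) \
        fprintf(stderr, "[TRACE] %s:%d " fmt "\n", __FILE__, __LINE__, ##__VA_ARGS__); } while (0)
```
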
### 2. Version Utilities (`version.h`, `version.c`)

**Purpose**: Reusable versioning system for C projects using git tags.

**Features**:
- Automatic version extraction from git tags
- Semantic versioning support (MAJOR.MINOR.PATCH)
- Version comparison functions
- Header file generation for embedding version info
- Build number tracking

**API**:
```c
// Version structure
typedef struct {
    int major;
    int minor;
    int patch;
    char* git_hash;
    char* build_date;
} version_info_t;

// Get version from git
int version_get_from_git(version_info_t* version);

// Generate version header file
int version_generate_header(const char* output_path, const char* prefix);

// Compare versions
int version_compare(version_info_t* v1, version_info_t* v2);

// Format version string
char* version_to_string(version_info_t* version);
```

**Usage Example**:
```c
#include <c_utils/version.h>

// In your build system:
version_generate_header("src/version.h", "MY_APP");

// In your code:
#include "version.h"
printf("Version: %s\n", MY_APP_VERSION);
```

**Integration with Projects**:
```bash
# In project Makefile
version.h:
	c_utils_lib/bin/generate_version src/version.h MY_PROJECT
```

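Semantic comparison is the one piece of the API above whose behavior is not illustrated elsewhere in this plan. A minimal sketch of how `version_compare()` could order two `version_info_t` values follows; it assumes the struct shown above and plain major/minor/patch ordering, which may differ from the eventual implementation:

```c
// Hypothetical sketch - compares major, then minor, then patch
int version_compare(version_info_t* v1, version_info_t* v2) {
    if (v1->major != v2->major) return (v1->major < v2->major) ? -1 : 1;
    if (v1->minor != v2->minor) return (v1->minor < v2->minor) ? -1 : 1;
    if (v1->patch != v2->patch) return (v1->patch < v2->patch) ? -1 : 1;
    return 0;  // identical versions
}
```
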
## Build System

### Static Library Output

```
libc_utils.a    # Static library for linking
```

### Build Targets

```bash
make            # Build static library
make examples   # Build examples
make test       # Run tests
make install    # Install to system (optional)
make clean      # Clean build artifacts
```

### Build Script (`build.sh`)

```bash
#!/bin/bash
# Simplified build script similar to nostr_core_lib

case "$1" in
    lib|"")
        make
        ;;
    examples)
        make examples
        ;;
    test)
        make test
        ;;
    clean)
        make clean
        ;;
    install)
        make install
        ;;
    *)
        echo "Usage: ./build.sh [lib|examples|test|clean|install]"
        exit 1
        ;;
esac
```

## Versioning System Design

### How It Works

1. **Git Tags as Source of Truth**
   - Version tags: `v0.1.0`, `v0.2.0`, etc.
   - Follows semantic versioning

2. **Automatic Header Generation**
   - Script reads git tags
   - Generates header with version macros
   - Includes build date and git hash

3. **Reusable Across Projects**
   - Each project calls `version_generate_header()`
   - Customizable prefix (e.g., `C_RELAY_VERSION`, `NOSTR_CORE_VERSION`)
   - No hardcoded version numbers in source

### Example Generated Header

```c
// Auto-generated by c_utils_lib version system
#ifndef MY_PROJECT_VERSION_H
#define MY_PROJECT_VERSION_H

#define MY_PROJECT_VERSION "v0.1.0"
#define MY_PROJECT_VERSION_MAJOR 0
#define MY_PROJECT_VERSION_MINOR 1
#define MY_PROJECT_VERSION_PATCH 0
#define MY_PROJECT_GIT_HASH "a1b2c3d"
#define MY_PROJECT_BUILD_DATE "2025-10-15"

#endif
```

### Integration Pattern

```makefile
# In consuming project's Makefile
VERSION_SCRIPT = c_utils_lib/bin/generate_version

src/version.h: .git/refs/tags/*
	$(VERSION_SCRIPT) src/version.h MY_PROJECT

my_app: src/version.h src/main.c
	$(CC) src/main.c -o my_app -Ic_utils_lib/include -Lc_utils_lib -lc_utils
```

## Future Utilities (Roadmap)

### String Utilities (`string_utils.h`)
- Safe string operations (bounds checking)
- String trimming, splitting, joining
- Case conversion
- Pattern matching helpers

### Memory Utilities (`memory_utils.h`)
- Safe allocation wrappers
- Memory pool management
- Leak detection helpers (debug builds)
- Arena allocators

### Configuration Utilities (`config_utils.h`)
- INI file parsing
- JSON configuration (using cJSON)
- Environment variable helpers
- Command-line argument parsing

### File Utilities (`file_utils.h`)
- Safe file operations
- Directory traversal
- Path manipulation
- File watching (inotify wrapper)

### Time Utilities (`time_utils.h`)
- Timestamp formatting
- Duration calculations
- Timer utilities
- Rate limiting helpers

## Integration Guide

### As Git Submodule

```bash
# In your project
git submodule add https://github.com/yourusername/c_utils_lib.git
git submodule update --init --recursive

# Build the library
cd c_utils_lib && ./build.sh lib && cd ..

# Update your Makefile
INCLUDES += -Ic_utils_lib/include
LIBS += -Lc_utils_lib -lc_utils
```

### In Your Makefile

```makefile
# Check if c_utils_lib is built
c_utils_lib/libc_utils.a:
	cd c_utils_lib && ./build.sh lib

# Link against it
my_app: c_utils_lib/libc_utils.a src/main.c
	$(CC) src/main.c -o my_app \
		-Ic_utils_lib/include \
		-Lc_utils_lib -lc_utils
```

### In Your Code

```c
// Option 1: Include everything
#include <c_utils/c_utils.h>

// Option 2: Include specific utilities
#include <c_utils/debug.h>
#include <c_utils/version.h>

int main() {
    debug_init(DEBUG_LEVEL_INFO);
    DEBUG_INFO("Starting application version %s", MY_APP_VERSION);
    return 0;
}
```

## Migration Plan for c-relay

### Phase 1: Extract Debug System
1. Create `c_utils_lib` repository
2. Move [`debug.c`](../src/debug.c) and [`debug.h`](../src/debug.h)
3. Create build system
4. Add basic tests

### Phase 2: Add Versioning System
1. Extract version generation logic from c-relay
2. Create reusable version utilities
3. Update c-relay to use new system
4. Update nostr_core_lib to use new system

### Phase 3: Add as Submodule
1. Add `c_utils_lib` as submodule to c-relay
2. Update c-relay Makefile
3. Update includes in c-relay source files
4. Remove old debug files from c-relay

### Phase 4: Documentation & Examples
1. Create comprehensive README
2. Add usage examples
3. Write integration guide
4. Document API

## Benefits

### For c-relay
- Cleaner separation of concerns
- Reusable utilities across projects
- Easier to maintain and test
- Consistent logging across codebase

### For Learning C
- Real-world utility implementations
- Best practices examples
- Modular design patterns
- Build system examples

### For Future Projects
- Drop-in utility library
- Proven, tested code
- Consistent patterns
- Time savings

## Testing Strategy

### Unit Tests
- Test each utility independently
- Mock external dependencies
- Edge case coverage
- Memory leak detection (valgrind)

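For the leak-detection bullet above, a typical invocation would look like the following; the test binary path is illustrative and assumes the `tests/` layout planned earlier in this document:

```bash
# Run a unit test binary under valgrind and fail on any detected leak
valgrind --leak-check=full --error-exitcode=1 ./tests/test_debug
```
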
### Integration Tests
- Test with real projects (c-relay, nostr_core_lib)
- Cross-platform testing
- Performance benchmarks

### Continuous Integration
- GitHub Actions for automated testing
- Multiple compiler versions (gcc, clang)
- Multiple platforms (Linux, macOS)
- Static analysis (cppcheck, clang-tidy)

## Documentation Standards

### Code Documentation
- Doxygen-style comments
- Function purpose and parameters
- Return value descriptions
- Usage examples in comments

### API Documentation
- Complete API reference in `docs/API.md`
- Usage examples for each function
- Common patterns and best practices
- Migration guides

### Learning Resources
- Detailed explanations of implementations
- Links to relevant C standards
- Common pitfalls and how to avoid them
- Performance considerations

## License

MIT License - permissive and suitable for learning and commercial use.

## Version History

- **v0.1.0** (Planned)
  - Initial release
  - Debug system
  - Version utilities
  - Basic documentation

- **v0.2.0** (Future)
  - String utilities
  - Memory utilities
  - Enhanced documentation

- **v0.3.0** (Future)
  - Configuration utilities
  - File utilities
  - Time utilities

## Success Criteria

1. ✅ Successfully integrated into c-relay
2. ✅ Successfully integrated into nostr_core_lib
3. ✅ All tests passing
4. ✅ Documentation complete
5. ✅ Examples working
6. ✅ Zero external dependencies (except standard library)
7. ✅ Cross-platform compatibility verified

## Next Steps

1. Create repository structure
2. Implement debug system
3. Implement version utilities
4. Create build system
5. Write tests
6. Create documentation
7. Integrate into c-relay
8. Publish to GitHub

---

**Note**: This is a living document. Update as the library evolves and new utilities are added.
621
docs/c_utils_lib_implementation_plan.md
Normal file
@@ -0,0 +1,621 @@
# c_utils_lib Implementation Plan

## Overview

This document provides a step-by-step implementation plan for creating the `c_utils_lib` library and integrating it into the c-relay project.

## Phase 1: Repository Setup & Structure

### Step 1.1: Create Repository Structure

**Location**: Create outside c-relay project (sibling directory)

```bash
# Create directory structure
mkdir -p c_utils_lib/{include,src,examples,tests,docs,bin}
cd c_utils_lib

# Create subdirectories
mkdir -p include/c_utils
mkdir -p tests/results
```

### Step 1.2: Initialize Git Repository

```bash
cd c_utils_lib
git init
git branch -M main
```

### Step 1.3: Create Core Files

**Files to create**:
1. `README.md` - Main documentation
2. `LICENSE` - MIT License
3. `VERSION` - Version file (v0.1.0)
4. `.gitignore` - Git ignore rules
5. `Makefile` - Build system
6. `build.sh` - Build script

## Phase 2: Debug System Implementation

### Step 2.1: Move Debug Files

**Source files** (from c-relay):
- `src/debug.c` → `c_utils_lib/src/debug.c`
- `src/debug.h` → `c_utils_lib/include/c_utils/debug.h`

**Modifications needed**:
1. Update header guard in `debug.h`:
   ```c
   #ifndef C_UTILS_DEBUG_H
   #define C_UTILS_DEBUG_H
   ```

2. No namespace changes needed (keep simple API)

3. Add header documentation:
   ```c
   /**
    * @file debug.h
    * @brief Debug and logging system with configurable verbosity levels
    *
    * Provides a simple, efficient logging system with 5 levels:
    * - ERROR: Critical errors
    * - WARN: Warnings
    * - INFO: Informational messages
    * - DEBUG: Debug messages
    * - TRACE: Detailed trace with file:line info
    */
   ```

### Step 2.2: Create Main Header

**File**: `include/c_utils/c_utils.h`

```c
#ifndef C_UTILS_H
#define C_UTILS_H

/**
 * @file c_utils.h
 * @brief Main header for c_utils_lib - includes all utilities
 *
 * Include this header to access all c_utils_lib functionality.
 * Alternatively, include specific headers for modular usage.
 */

// Version information
#define C_UTILS_VERSION "v0.1.0"
#define C_UTILS_VERSION_MAJOR 0
#define C_UTILS_VERSION_MINOR 1
#define C_UTILS_VERSION_PATCH 0

// Include all utilities
#include "debug.h"
#include "version.h"

#endif /* C_UTILS_H */
```

## Phase 3: Version Utilities Implementation

### Step 3.1: Design Version API

**File**: `include/c_utils/version.h`

```c
#ifndef C_UTILS_VERSION_H
#define C_UTILS_VERSION_H

#include <time.h>

/**
 * @brief Version information structure
 */
typedef struct {
    int major;
    int minor;
    int patch;
    char git_hash[41];        // SHA-1 hash (40 chars + null)
    char build_date[32];      // ISO 8601 format
    char version_string[64];  // "vX.Y.Z" format
} version_info_t;

/**
 * @brief Extract version from git tags
 * @param version Output version structure
 * @return 0 on success, -1 on error
 */
int version_get_from_git(version_info_t* version);

/**
 * @brief Generate version header file for a project
 * @param output_path Path to output header file
 * @param prefix Prefix for macros (e.g., "MY_APP")
 * @return 0 on success, -1 on error
 */
int version_generate_header(const char* output_path, const char* prefix);

/**
 * @brief Compare two versions
 * @return -1 if v1 < v2, 0 if equal, 1 if v1 > v2
 */
int version_compare(const version_info_t* v1, const version_info_t* v2);

/**
 * @brief Format version as string
 * @param version Version structure
 * @param buffer Output buffer
 * @param buffer_size Size of output buffer
 * @return Number of characters written
 */
int version_to_string(const version_info_t* version, char* buffer, size_t buffer_size);

#endif /* C_UTILS_VERSION_H */
```

### Step 3.2: Implement Version Utilities

**File**: `src/version.c`

Key functions to implement:
1. `version_get_from_git()` - Execute `git describe --tags` and parse
2. `version_generate_header()` - Generate header file with macros
3. `version_compare()` - Semantic version comparison
4. `version_to_string()` - Format version string

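As a starting point for item 1, here is a minimal sketch of how `version_get_from_git()` could shell out to `git describe` and fill the structure from Step 3.1. It is only an assumed shape for the eventual implementation (error handling, tag formats, and filling `git_hash`/`build_date` will need more care):

```c
// Hypothetical sketch for src/version.c - not the final implementation
#include <stdio.h>
#include <string.h>
#include "c_utils/version.h"

int version_get_from_git(version_info_t* version) {
    if (!version) return -1;
    memset(version, 0, sizeof(*version));

    // Ask git for the most recent tag, e.g. "v0.1.0" or "v0.1.0-3-gabc1234"
    FILE* p = popen("git describe --tags --always 2>/dev/null", "r");
    if (!p) return -1;

    char line[128] = {0};
    if (!fgets(line, sizeof(line), p)) {
        pclose(p);
        return -1;
    }
    pclose(p);
    line[strcspn(line, "\n")] = '\0';

    // Parse "vMAJOR.MINOR.PATCH" from the start of the tag
    if (sscanf(line, "v%d.%d.%d", &version->major, &version->minor, &version->patch) != 3) {
        return -1;
    }
    snprintf(version->version_string, sizeof(version->version_string),
             "v%d.%d.%d", version->major, version->minor, version->patch);
    // git_hash and build_date are left for the real implementation
    return 0;
}
```
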
### Step 3.3: Create Version Generation Script

**File**: `bin/generate_version`

```bash
#!/bin/bash
# Generate version header for a project

OUTPUT_FILE="$1"
PREFIX="$2"

if [ -z "$OUTPUT_FILE" ] || [ -z "$PREFIX" ]; then
    echo "Usage: $0 <output_file> <prefix>"
    exit 1
fi

# Get version from git
if [ -d .git ]; then
    VERSION=$(git describe --tags --always 2>/dev/null || echo "v0.0.0")
    GIT_HASH=$(git rev-parse --short HEAD 2>/dev/null || echo "unknown")
else
    VERSION="v0.0.0"
    GIT_HASH="unknown"
fi

# Parse version
CLEAN_VERSION=$(echo "$VERSION" | sed 's/^v//' | cut -d- -f1)
MAJOR=$(echo "$CLEAN_VERSION" | cut -d. -f1)
MINOR=$(echo "$CLEAN_VERSION" | cut -d. -f2)
PATCH=$(echo "$CLEAN_VERSION" | cut -d. -f3)
BUILD_DATE=$(date -u +"%Y-%m-%d %H:%M:%S UTC")

# Generate header
cat > "$OUTPUT_FILE" << EOF
/* Auto-generated by c_utils_lib version system */
/* DO NOT EDIT - This file is automatically generated */

#ifndef ${PREFIX}_VERSION_H
#define ${PREFIX}_VERSION_H

#define ${PREFIX}_VERSION "v${CLEAN_VERSION}"
#define ${PREFIX}_VERSION_MAJOR ${MAJOR}
#define ${PREFIX}_VERSION_MINOR ${MINOR}
#define ${PREFIX}_VERSION_PATCH ${PATCH}
#define ${PREFIX}_GIT_HASH "${GIT_HASH}"
#define ${PREFIX}_BUILD_DATE "${BUILD_DATE}"

#endif /* ${PREFIX}_VERSION_H */
EOF

echo "Generated $OUTPUT_FILE with version v${CLEAN_VERSION}"
```

## Phase 4: Build System

### Step 4.1: Create Makefile

**File**: `Makefile`

```makefile
# c_utils_lib Makefile

CC = gcc
AR = ar
CFLAGS = -Wall -Wextra -std=c99 -O2 -g
INCLUDES = -Iinclude

# Directories
SRC_DIR = src
INCLUDE_DIR = include
BUILD_DIR = build
EXAMPLES_DIR = examples
TESTS_DIR = tests

# Source files
SOURCES = $(wildcard $(SRC_DIR)/*.c)
OBJECTS = $(SOURCES:$(SRC_DIR)/%.c=$(BUILD_DIR)/%.o)

# Output library
LIBRARY = libc_utils.a

# Default target
all: $(LIBRARY)

# Create build directory
$(BUILD_DIR):
	mkdir -p $(BUILD_DIR)

# Compile source files
$(BUILD_DIR)/%.o: $(SRC_DIR)/%.c | $(BUILD_DIR)
	$(CC) $(CFLAGS) $(INCLUDES) -c $< -o $@

# Create static library
$(LIBRARY): $(OBJECTS)
	$(AR) rcs $@ $^
	@echo "Built $(LIBRARY)"

# Build examples
examples: $(LIBRARY)
	$(MAKE) -C $(EXAMPLES_DIR)

# Run tests
test: $(LIBRARY)
	$(MAKE) -C $(TESTS_DIR)
	$(TESTS_DIR)/run_tests.sh

# Install to system (optional)
install: $(LIBRARY)
	install -d /usr/local/lib
	install -m 644 $(LIBRARY) /usr/local/lib/
	install -d /usr/local/include/c_utils
	install -m 644 $(INCLUDE_DIR)/c_utils/*.h /usr/local/include/c_utils/
	@echo "Installed to /usr/local"

# Uninstall from system
uninstall:
	rm -f /usr/local/lib/$(LIBRARY)
	rm -rf /usr/local/include/c_utils
	@echo "Uninstalled from /usr/local"

# Clean build artifacts
clean:
	rm -rf $(BUILD_DIR) $(LIBRARY)
	$(MAKE) -C $(EXAMPLES_DIR) clean 2>/dev/null || true
	$(MAKE) -C $(TESTS_DIR) clean 2>/dev/null || true

# Help
help:
	@echo "c_utils_lib Build System"
	@echo ""
	@echo "Targets:"
	@echo "  all        Build static library (default)"
	@echo "  examples   Build examples"
	@echo "  test       Run tests"
	@echo "  install    Install to /usr/local"
	@echo "  uninstall  Remove from /usr/local"
	@echo "  clean      Clean build artifacts"
	@echo "  help       Show this help"

.PHONY: all examples test install uninstall clean help
```

### Step 4.2: Create Build Script

**File**: `build.sh`

```bash
#!/bin/bash
# c_utils_lib build script

set -e

case "$1" in
    lib|"")
        echo "Building c_utils_lib..."
        make
        ;;
    examples)
        echo "Building examples..."
        make examples
        ;;
    test)
        echo "Running tests..."
        make test
        ;;
    clean)
        echo "Cleaning..."
        make clean
        ;;
    install)
        echo "Installing..."
        make install
        ;;
    *)
        echo "Usage: ./build.sh [lib|examples|test|clean|install]"
        exit 1
        ;;
esac

echo "Done!"
```

## Phase 5: Examples & Tests

### Step 5.1: Create Debug Example

**File**: `examples/debug_example.c`

```c
#include <c_utils/debug.h>

int main() {
    // Initialize with INFO level
    debug_init(DEBUG_LEVEL_INFO);

    DEBUG_INFO("Application started");
    DEBUG_WARN("This is a warning");
    DEBUG_ERROR("This is an error");

    // This won't print (level too high)
    DEBUG_LOG("This debug message won't show");

    // Change level to DEBUG
    g_debug_level = DEBUG_LEVEL_DEBUG;
    DEBUG_LOG("Now debug messages show");

    // Change to TRACE to see file:line info
    g_debug_level = DEBUG_LEVEL_TRACE;
    DEBUG_TRACE("Trace with file:line information");

    return 0;
}
```

### Step 5.2: Create Version Example

**File**: `examples/version_example.c`

```c
#include <c_utils/version.h>
#include <stdio.h>

int main() {
    version_info_t version;

    // Get version from git
    if (version_get_from_git(&version) == 0) {
        char version_str[64];
        version_to_string(&version, version_str, sizeof(version_str));

        printf("Version: %s\n", version_str);
        printf("Git Hash: %s\n", version.git_hash);
        printf("Build Date: %s\n", version.build_date);
    }

    return 0;
}
```

### Step 5.3: Create Test Suite

**File**: `tests/test_debug.c`

```c
#include <c_utils/debug.h>
#include <stdio.h>
#include <string.h>

int test_debug_init() {
    debug_init(DEBUG_LEVEL_INFO);
    return (g_debug_level == DEBUG_LEVEL_INFO) ? 0 : -1;
}

int test_debug_levels() {
    // Test that higher levels don't print at lower settings
    debug_init(DEBUG_LEVEL_ERROR);
    // Would need to capture stdout to verify
    return 0;
}

int main() {
    int failed = 0;

    printf("Running debug tests...\n");

    if (test_debug_init() != 0) {
        printf("FAIL: test_debug_init\n");
        failed++;
    } else {
        printf("PASS: test_debug_init\n");
    }

    if (test_debug_levels() != 0) {
        printf("FAIL: test_debug_levels\n");
        failed++;
    } else {
        printf("PASS: test_debug_levels\n");
    }

    return failed;
}
```

## Phase 6: Documentation

### Step 6.1: Create README.md

Key sections:
1. Overview and purpose
2. Quick start guide
3. Installation instructions
4. Usage examples
5. API reference (brief)
6. Integration guide
7. Contributing guidelines
8. License

### Step 6.2: Create API Documentation

**File**: `docs/API.md`

Complete API reference with:
- Function signatures
- Parameter descriptions
- Return values
- Usage examples
- Common patterns

### Step 6.3: Create Integration Guide

**File**: `docs/INTEGRATION.md`

How to integrate into projects:
1. As git submodule
2. Makefile integration
3. Code examples
4. Migration from standalone utilities

## Phase 7: Integration with c-relay

### Step 7.1: Add as Submodule

```bash
cd /path/to/c-relay
git submodule add <repo-url> c_utils_lib
git submodule update --init --recursive
```

### Step 7.2: Update c-relay Makefile

```makefile
# Add to c-relay Makefile
C_UTILS_LIB = c_utils_lib/libc_utils.a

# Update includes
INCLUDES += -Ic_utils_lib/include

# Update libs
LIBS += -Lc_utils_lib -lc_utils

# Add dependency
$(C_UTILS_LIB):
	cd c_utils_lib && ./build.sh lib

# Update main target
$(TARGET): $(C_UTILS_LIB) ...
```

### Step 7.3: Update c-relay Source Files

**Changes needed**:

1. Update includes:
   ```c
   // Old
   #include "debug.h"

   // New
   #include <c_utils/debug.h>
   ```

2. Remove old debug files:
   ```bash
   git rm src/debug.c src/debug.h
   ```

3. Update all files that use debug system:
   - `src/main.c`
   - `src/config.c`
   - `src/dm_admin.c`
   - `src/websockets.c`
   - `src/subscriptions.c`
   - Any other files using DEBUG_* macros

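The include rewrite from item 1 can be applied to all of the files above in one pass. A sketch, assuming GNU sed and that every file uses the exact `#include "debug.h"` form:

```bash
# Rewrite the debug include in every source file that still uses the old path
grep -rl '#include "debug.h"' src/ | \
    xargs sed -i 's|#include "debug.h"|#include <c_utils/debug.h>|'
```
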
### Step 7.4: Test Integration

```bash
cd c-relay
make clean
make
./make_and_restart_relay.sh
```

Verify:
- Compilation succeeds
- Debug output works correctly
- No functionality regressions

## Phase 8: Version System Integration

### Step 8.1: Update c-relay Makefile for Versioning

```makefile
# Add version generation
src/version.h: .git/refs/tags/*
	c_utils_lib/bin/generate_version src/version.h C_RELAY

# Add dependency
$(TARGET): src/version.h ...
```

### Step 8.2: Update c-relay to Use Generated Version

Replace hardcoded version in `src/main.h` with:
```c
#include "version.h"
// Use C_RELAY_VERSION instead of hardcoded VERSION
```

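Once the generated header is in place, any code that previously printed a hardcoded version can switch to the generated macros. A small illustrative snippet (the macro names follow the `C_RELAY` prefix chosen in Step 8.1; the banner function itself is hypothetical):

```c
#include <stdio.h>
#include "version.h"

// Example: startup banner built entirely from generated macros
static void print_version_banner(void) {
    printf("c-relay %s (commit %s, built %s)\n",
           C_RELAY_VERSION, C_RELAY_GIT_HASH, C_RELAY_BUILD_DATE);
}
```
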
## Timeline Estimate

- **Phase 1**: Repository Setup - 1 hour
- **Phase 2**: Debug System - 2 hours
- **Phase 3**: Version Utilities - 4 hours
- **Phase 4**: Build System - 2 hours
- **Phase 5**: Examples & Tests - 3 hours
- **Phase 6**: Documentation - 3 hours
- **Phase 7**: c-relay Integration - 2 hours
- **Phase 8**: Version Integration - 2 hours

**Total**: ~19 hours

## Success Criteria

- [ ] c_utils_lib builds successfully
- [ ] All tests pass
- [ ] Examples compile and run
- [ ] c-relay integrates successfully
- [ ] Debug output works in c-relay
- [ ] Version generation works
- [ ] Documentation complete
- [ ] No regressions in c-relay functionality

## Next Steps

1. Review this plan with stakeholders
2. Create repository structure
3. Implement debug system
4. Implement version utilities
5. Create build system
6. Write tests and examples
7. Create documentation
8. Integrate into c-relay
9. Test thoroughly
10. Publish to GitHub

## Notes

- Keep the API simple and intuitive
- Focus on zero external dependencies
- Prioritize learning value in code comments
- Make integration as easy as possible
- Document everything thoroughly
298
docs/libwebsockets_proper_pattern.md
Normal file
@@ -0,0 +1,298 @@
# Libwebsockets Proper Pattern - Message Queue Design

## Problem Analysis

### Current Violation
We're calling `lws_write()` directly from multiple code paths:
1. **Event broadcast** (subscriptions.c:667) - when events arrive
2. **OK responses** (websockets.c:855) - when processing EVENT messages
3. **EOSE responses** (websockets.c:976) - when processing REQ messages
4. **COUNT responses** (websockets.c:1922) - when processing COUNT messages

This violates libwebsockets' design pattern which requires:
- **`lws_write()` ONLY called from `LWS_CALLBACK_SERVER_WRITEABLE`**
- Application queues messages and requests writeable callback
- Libwebsockets handles write timing and socket buffer management

### Consequences of Violation
1. Partial writes when socket buffer is full
2. Multiple concurrent write attempts before callback fires
3. "write already pending" errors with single buffer
4. Frame corruption from interleaved partial writes
5. "Invalid frame header" errors on client side

## Correct Architecture

### Message Queue Pattern

```
┌─────────────────────────────────────────────────────────────┐
│                      Application Layer                       │
├─────────────────────────────────────────────────────────────┤
│                                                               │
│ Event Arrives  → Queue Message → Request Writeable Callback  │
│ REQ Received   → Queue EOSE    → Request Writeable Callback  │
│ EVENT Received → Queue OK      → Request Writeable Callback  │
│ COUNT Received → Queue COUNT   → Request Writeable Callback  │
│                                                               │
└─────────────────────────────────────────────────────────────┘
                               ↓
                 lws_callback_on_writable(wsi)
                               ↓
┌─────────────────────────────────────────────────────────────┐
│                LWS_CALLBACK_SERVER_WRITEABLE                  │
├─────────────────────────────────────────────────────────────┤
│                                                               │
│ 1. Dequeue next message from queue                           │
│ 2. Call lws_write() with message data                        │
│ 3. If queue not empty, request another callback              │
│                                                               │
└─────────────────────────────────────────────────────────────┘
                               ↓
                 libwebsockets handles:
                 - Socket buffer management
                 - Partial write handling
                 - Frame atomicity
```

## Data Structures

### Message Queue Node
```c
typedef struct message_queue_node {
    unsigned char* data;             // Message data (with LWS_PRE space)
    size_t length;                   // Message length (without LWS_PRE)
    enum lws_write_protocol type;    // LWS_WRITE_TEXT, etc.
    struct message_queue_node* next;
} message_queue_node_t;
```

### Per-Session Data Updates
```c
struct per_session_data {
    // ... existing fields ...

    // Message queue (replaces single buffer)
    message_queue_node_t* message_queue_head;
    message_queue_node_t* message_queue_tail;
    int message_queue_count;
    int writeable_requested;  // Flag to prevent duplicate requests
};
```

## Implementation Functions

### 1. Queue Message (Application Layer)
```c
int queue_message(struct lws* wsi, struct per_session_data* pss,
                  const char* message, size_t length,
                  enum lws_write_protocol type)
{
    // Allocate node
    message_queue_node_t* node = malloc(sizeof(message_queue_node_t));

    // Allocate buffer with LWS_PRE space
    node->data = malloc(LWS_PRE + length);
    memcpy(node->data + LWS_PRE, message, length);
    node->length = length;
    node->type = type;
    node->next = NULL;

    // Add to queue (FIFO)
    pthread_mutex_lock(&pss->session_lock);
    if (!pss->message_queue_head) {
        pss->message_queue_head = node;
        pss->message_queue_tail = node;
    } else {
        pss->message_queue_tail->next = node;
        pss->message_queue_tail = node;
    }
    pss->message_queue_count++;
    pthread_mutex_unlock(&pss->session_lock);

    // Request writeable callback (only if not already requested)
    if (!pss->writeable_requested) {
        pss->writeable_requested = 1;
        lws_callback_on_writable(wsi);
    }

    return 0;
}
```

### 2. Process Queue (Writeable Callback)
```c
int process_message_queue(struct lws* wsi, struct per_session_data* pss)
{
    pthread_mutex_lock(&pss->session_lock);

    // Get next message from queue
    message_queue_node_t* node = pss->message_queue_head;
    if (!node) {
        pss->writeable_requested = 0;
        pthread_mutex_unlock(&pss->session_lock);
        return 0;  // Queue empty
    }

    // Remove from queue
    pss->message_queue_head = node->next;
    if (!pss->message_queue_head) {
        pss->message_queue_tail = NULL;
    }
    pss->message_queue_count--;

    pthread_mutex_unlock(&pss->session_lock);

    // Write message (libwebsockets handles partial writes)
    int result = lws_write(wsi, node->data + LWS_PRE, node->length, node->type);

    // Free node
    free(node->data);
    free(node);

    // If queue not empty, request another callback
    pthread_mutex_lock(&pss->session_lock);
    if (pss->message_queue_head) {
        lws_callback_on_writable(wsi);
    } else {
        pss->writeable_requested = 0;
    }
    pthread_mutex_unlock(&pss->session_lock);

    return (result < 0) ? -1 : 0;
}
```

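The migration steps later in this document also call for freeing any queued messages when a connection closes (`LWS_CALLBACK_CLOSED`). A minimal sketch of that cleanup, assuming the same structures as above and not yet implemented:

```c
// Hypothetical cleanup for LWS_CALLBACK_CLOSED - drains and frees the queue
void free_message_queue(struct per_session_data* pss)
{
    pthread_mutex_lock(&pss->session_lock);
    message_queue_node_t* node = pss->message_queue_head;
    while (node) {
        message_queue_node_t* next = node->next;
        free(node->data);
        free(node);
        node = next;
    }
    pss->message_queue_head = NULL;
    pss->message_queue_tail = NULL;
    pss->message_queue_count = 0;
    pss->writeable_requested = 0;
    pthread_mutex_unlock(&pss->session_lock);
}
```
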
## Refactoring Changes

### Before (WRONG - Direct Write)
```c
// websockets.c:855 - OK response
int write_result = lws_write(wsi, buf + LWS_PRE, response_len, LWS_WRITE_TEXT);
if (write_result < 0) {
    DEBUG_ERROR("Write failed");
} else if ((size_t)write_result != response_len) {
    // Partial write - queue remaining data
    queue_websocket_write(wsi, pss, ...);
}
```

### After (CORRECT - Queue Message)
```c
// websockets.c:855 - OK response
queue_message(wsi, pss, response_str, response_len, LWS_WRITE_TEXT);
// That's it! Writeable callback will handle the actual write
```

### Before (WRONG - Direct Write in Broadcast)
```c
// subscriptions.c:667 - EVENT broadcast
int write_result = lws_write(current_temp->wsi, buf + LWS_PRE, msg_len, LWS_WRITE_TEXT);
if (write_result < 0) {
    DEBUG_ERROR("Write failed");
} else if ((size_t)write_result != msg_len) {
    queue_websocket_write(...);
}
```

### After (CORRECT - Queue Message)
```c
// subscriptions.c:667 - EVENT broadcast
struct per_session_data* pss = lws_wsi_user(current_temp->wsi);
queue_message(current_temp->wsi, pss, msg_str, msg_len, LWS_WRITE_TEXT);
// Writeable callback will handle the actual write
```

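The other half of the refactor is the handler that drains the queue (migration step 11). The existing callback code is not reproduced in this document, so the following is only a sketch of how the `LWS_CALLBACK_SERVER_WRITEABLE` case inside the protocol callback's switch could hand off to `process_message_queue()`:

```c
// Hypothetical excerpt from the protocol callback in websockets.c
case LWS_CALLBACK_SERVER_WRITEABLE: {
    struct per_session_data* pss = (struct per_session_data*)user;
    if (process_message_queue(wsi, pss) < 0) {
        DEBUG_ERROR("Failed to write queued message");
        return -1;  // Closing the connection on write failure
    }
    break;
}
```
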
## Benefits of Correct Pattern

1. **No Partial Write Handling Needed**
   - Libwebsockets handles partial writes internally
   - We just queue complete messages

2. **No "Write Already Pending" Errors**
   - Queue can hold unlimited messages
   - Each processed sequentially from callback

3. **Thread Safety**
   - Queue operations protected by session lock
   - Write only from single callback thread

4. **Frame Atomicity**
   - Libwebsockets ensures complete frame transmission
   - No interleaved partial writes

5. **Simpler Code**
   - No complex partial write state machine
   - Just queue and forget

6. **Better Performance**
   - Libwebsockets optimizes write timing
   - Batches writes when socket ready

## Migration Steps

1. ✅ Identify all `lws_write()` call sites
2. ✅ Confirm violation of libwebsockets pattern
3. ⏳ Design message queue structure
4. ⏳ Implement `queue_message()` function
5. ⏳ Implement `process_message_queue()` function
6. ⏳ Update `per_session_data` structure
7. ⏳ Refactor OK response to use queue
8. ⏳ Refactor EOSE response to use queue
9. ⏳ Refactor COUNT response to use queue
10. ⏳ Refactor EVENT broadcast to use queue
11. ⏳ Update `LWS_CALLBACK_SERVER_WRITEABLE` handler
12. ⏳ Add queue cleanup in `LWS_CALLBACK_CLOSED`
13. ⏳ Remove old partial write code
14. ⏳ Test with rapid multiple events
15. ⏳ Test with large events (>4KB)
16. ⏳ Test under load
17. ⏳ Verify no frame errors

## Testing Strategy

### Test 1: Multiple Rapid Events
```bash
# Send 10 events rapidly to same client
for i in {1..10}; do
    echo '["EVENT",{"kind":1,"content":"test'$i'","created_at":'$(date +%s)',...}]' | \
        websocat ws://localhost:8888 &
done
```

**Expected**: All events queued and sent sequentially, no errors

### Test 2: Large Events
```bash
# Send event >4KB (forces multiple socket writes)
nak event --content "$(head -c 5000 /dev/urandom | base64)" | \
    websocat ws://localhost:8888
```

**Expected**: Event queued, libwebsockets handles partial writes internally

### Test 3: Concurrent Connections
```bash
# 100 concurrent connections, each sending events
for i in {1..100}; do
    (echo '["REQ","sub'$i'",{}]'; sleep 1) | websocat ws://localhost:8888 &
done
```

**Expected**: All subscriptions work, events broadcast correctly

## Success Criteria

- ✅ No `lws_write()` calls outside `LWS_CALLBACK_SERVER_WRITEABLE`
- ✅ No "write already pending" errors in logs
- ✅ No "Invalid frame header" errors on client side
- ✅ All messages delivered in correct order
- ✅ Large events (>4KB) handled correctly
- ✅ Multiple rapid events to same client work
- ✅ Concurrent connections stable under load

## References

- [libwebsockets documentation](https://libwebsockets.org/lws-api-doc-main/html/index.html)
- [LWS_CALLBACK_SERVER_WRITEABLE](https://libwebsockets.org/lws-api-doc-main/html/group__callback-when-writeable.html)
- [lws_callback_on_writable()](https://libwebsockets.org/lws-api-doc-main/html/group__callback-when-writeable.html#ga96f3ad8e1e2c3e0c8e0b0e5e5e5e5e5e)
601
docs/monitoring_simplified_plan.md
Normal file
@@ -0,0 +1,601 @@
# Simplified Monitoring Implementation Plan
## Kind 34567 Event Kind Distribution Reporting

**Date:** 2025-10-16
**Status:** Implementation Ready

---

## Overview

Simplified real-time monitoring system that:
- Reports event kind distribution (which includes total event count)
- Uses kind 34567 addressable events with `d=event_kinds`
- Controlled by two config variables
- Enabled on-demand when admin logs in
- Uses simple throttling to prevent performance impact

---

## Configuration Variables

### Database Config Table

Add two new configuration keys:

```sql
INSERT INTO config (key, value, data_type, description, category) VALUES
('kind_34567_reporting_enabled', 'false', 'boolean',
 'Enable/disable kind 34567 event kind distribution reporting', 'monitoring'),
('kind_34567_reporting_throttling_sec', '5', 'integer',
 'Minimum seconds between kind 34567 reports (throttling)', 'monitoring');
```

### Configuration Access

```c
// In src/monitoring.c or src/api.c
int is_monitoring_enabled(void) {
    return get_config_bool("kind_34567_reporting_enabled", 0);
}

int get_monitoring_throttle_seconds(void) {
    return get_config_int("kind_34567_reporting_throttling_sec", 5);
}
```

---

## Event Structure

### Kind 34567 Event Format

```json
{
  "id": "<event_id>",
  "pubkey": "<relay_pubkey>",
  "created_at": 1697123456,
  "kind": 34567,
  "content": "{\"data_type\":\"event_kinds\",\"timestamp\":1697123456,\"data\":{\"total_events\":125000,\"distribution\":[{\"kind\":1,\"count\":45000,\"percentage\":36.0},{\"kind\":3,\"count\":12500,\"percentage\":10.0}]}}",
  "tags": [
    ["d", "event_kinds"],
    ["relay", "<relay_pubkey>"]
  ],
  "sig": "<signature>"
}
```

### Content JSON Structure

```json
{
  "data_type": "event_kinds",
  "timestamp": 1697123456,
  "data": {
    "total_events": 125000,
    "distribution": [
      {
        "kind": 1,
        "count": 45000,
        "percentage": 36.0
      },
      {
        "kind": 3,
        "count": 12500,
        "percentage": 10.0
      }
    ]
  },
  "metadata": {
    "query_time_ms": 18
  }
}
```

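For context on how these reports would be consumed, an admin client can subscribe with an ordinary NIP-01 filter on the kind and `d` tag. This is only an illustrative subscription (it reuses the `websocat` style from the other design docs and is not part of the plan itself):

```bash
# Illustrative: subscribe to the relay's own kind 34567 reports
echo '["REQ","admin-monitoring",{"kinds":[34567],"#d":["event_kinds"],"limit":1}]' | \
    websocat ws://localhost:8888
```
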
---

## Implementation

### File Structure

```
src/
  monitoring.h    # New file - monitoring system header
  monitoring.c    # New file - monitoring implementation
  main.c          # Modified - add trigger hook
  config.c        # Modified - add config keys (or use migration)
```

### 1. Header File: `src/monitoring.h`

```c
#ifndef MONITORING_H
#define MONITORING_H

#include <time.h>
#include <cjson/cJSON.h>

// Initialize monitoring system
int init_monitoring_system(void);

// Cleanup monitoring system
void cleanup_monitoring_system(void);

// Called when an event is stored (from main.c)
void monitoring_on_event_stored(void);

// Enable/disable monitoring (called from admin API)
int set_monitoring_enabled(int enabled);

// Get monitoring status
int is_monitoring_enabled(void);

// Get throttle interval
int get_monitoring_throttle_seconds(void);

#endif /* MONITORING_H */
```

### 2. Implementation: `src/monitoring.c`

```c
#include "monitoring.h"
#include "config.h"
#include "debug.h"
#include "../nostr_core_lib/nostr_core/nostr_core.h"
#include <sqlite3.h>
#include <string.h>
#include <time.h>

// External references
extern sqlite3* g_db;
extern int broadcast_event_to_subscriptions(cJSON* event);
extern int store_event(cJSON* event);
extern const char* get_config_value(const char* key);
extern int get_config_bool(const char* key, int default_value);
extern int get_config_int(const char* key, int default_value);
extern char* get_relay_private_key(void);

// Throttling state
static time_t last_report_time = 0;

// Initialize monitoring system
int init_monitoring_system(void) {
    DEBUG_LOG("Monitoring system initialized");
    last_report_time = 0;
    return 0;
}

// Cleanup monitoring system
void cleanup_monitoring_system(void) {
    DEBUG_LOG("Monitoring system cleaned up");
}

// Check if monitoring is enabled
int is_monitoring_enabled(void) {
    return get_config_bool("kind_34567_reporting_enabled", 0);
}

// Get throttle interval
int get_monitoring_throttle_seconds(void) {
    return get_config_int("kind_34567_reporting_throttling_sec", 5);
}

// Enable/disable monitoring
int set_monitoring_enabled(int enabled) {
    // Update config table
    const char* value = enabled ? "true" : "false";

    // This would call update_config_in_table() or similar
    // For now, assume we have a function to update config
    extern int update_config_in_table(const char* key, const char* value);
    return update_config_in_table("kind_34567_reporting_enabled", value);
}

// Query event kind distribution from database
static char* query_event_kind_distribution(void) {
    if (!g_db) {
        DEBUG_ERROR("Database not available for monitoring query");
        return NULL;
    }

    struct timespec start_time;
    clock_gettime(CLOCK_MONOTONIC, &start_time);

    // Query total events
    sqlite3_stmt* stmt;
    int total_events = 0;

    if (sqlite3_prepare_v2(g_db, "SELECT COUNT(*) FROM events", -1, &stmt, NULL) == SQLITE_OK) {
        if (sqlite3_step(stmt) == SQLITE_ROW) {
            total_events = sqlite3_column_int(stmt, 0);
        }
        sqlite3_finalize(stmt);
    }

    // Query kind distribution
    cJSON* response = cJSON_CreateObject();
    cJSON_AddStringToObject(response, "data_type", "event_kinds");
    cJSON_AddNumberToObject(response, "timestamp", (double)time(NULL));

    cJSON* data = cJSON_CreateObject();
    cJSON_AddNumberToObject(data, "total_events", total_events);

    cJSON* distribution = cJSON_CreateArray();

    const char* sql =
        "SELECT kind, COUNT(*) as count, "
        "ROUND(COUNT(*) * 100.0 / (SELECT COUNT(*) FROM events), 2) as percentage "
        "FROM events GROUP BY kind ORDER BY count DESC";

    if (sqlite3_prepare_v2(g_db, sql, -1, &stmt, NULL) == SQLITE_OK) {
        while (sqlite3_step(stmt) == SQLITE_ROW) {
            cJSON* kind_obj = cJSON_CreateObject();
            cJSON_AddNumberToObject(kind_obj, "kind", sqlite3_column_int(stmt, 0));
            cJSON_AddNumberToObject(kind_obj, "count", sqlite3_column_int64(stmt, 1));
            cJSON_AddNumberToObject(kind_obj, "percentage", sqlite3_column_double(stmt, 2));
            cJSON_AddItemToArray(distribution, kind_obj);
        }
        sqlite3_finalize(stmt);
    }

    cJSON_AddItemToObject(data, "distribution", distribution);
    cJSON_AddItemToObject(response, "data", data);

    // Calculate query time
    struct timespec end_time;
    clock_gettime(CLOCK_MONOTONIC, &end_time);
    double query_time_ms = (end_time.tv_sec - start_time.tv_sec) * 1000.0 +
                           (end_time.tv_nsec - start_time.tv_nsec) / 1000000.0;

    cJSON* metadata = cJSON_CreateObject();
    cJSON_AddNumberToObject(metadata, "query_time_ms", query_time_ms);
    cJSON_AddItemToObject(response, "metadata", metadata);

    char* json_string = cJSON_Print(response);
    cJSON_Delete(response);

    return json_string;
}

// Generate and broadcast kind 34567 event
static int generate_monitoring_event(const char* json_content) {
    if (!json_content) return -1;

    // Get relay keys
    const char* relay_pubkey = get_config_value("relay_pubkey");
    char* relay_privkey_hex = get_relay_private_key();
    if (!relay_pubkey || !relay_privkey_hex) {
        if (relay_privkey_hex) free(relay_privkey_hex);
        DEBUG_ERROR("Could not get relay keys for monitoring event");
        return -1;
    }

    // Convert relay private key to bytes
    unsigned char relay_privkey[32];
    if (nostr_hex_to_bytes(relay_privkey_hex, relay_privkey, sizeof(relay_privkey)) != 0) {
        free(relay_privkey_hex);
        DEBUG_ERROR("Failed to convert relay private key");
        return -1;
    }
    free(relay_privkey_hex);

    // Create tags array
    cJSON* tags = cJSON_CreateArray();

    // d tag for addressable event
    cJSON* d_tag = cJSON_CreateArray();
    cJSON_AddItemToArray(d_tag, cJSON_CreateString("d"));
    cJSON_AddItemToArray(d_tag, cJSON_CreateString("event_kinds"));
    cJSON_AddItemToArray(tags, d_tag);

    // relay tag
    cJSON* relay_tag = cJSON_CreateArray();
    cJSON_AddItemToArray(relay_tag, cJSON_CreateString("relay"));
    cJSON_AddItemToArray(relay_tag, cJSON_CreateString(relay_pubkey));
    cJSON_AddItemToArray(tags, relay_tag);

    // Create and sign event
    cJSON* event = nostr_create_and_sign_event(
        34567,          // kind
        json_content,   // content
        tags,           // tags
        relay_privkey,  // private key
        time(NULL)      // timestamp
    );

    if (!event) {
        DEBUG_ERROR("Failed to create and sign monitoring event");
        return -1;
    }

    // Broadcast to subscriptions
    broadcast_event_to_subscriptions(event);

    // Store in database
    int result = store_event(event);

    cJSON_Delete(event);

    return result;
}

// Called when an event is stored
void monitoring_on_event_stored(void) {
    // Check if monitoring is enabled
    if (!is_monitoring_enabled()) {
        return;
    }

    // Check throttling
    time_t now = time(NULL);
    int throttle_seconds = get_monitoring_throttle_seconds();

    if (now - last_report_time < throttle_seconds) {
        return;  // Too soon, skip this update
    }

    // Query event kind distribution
    char* json_content = query_event_kind_distribution();
    if (!json_content) {
        DEBUG_ERROR("Failed to query event kind distribution");
        return;
    }

    // Generate and broadcast monitoring event
    int result = generate_monitoring_event(json_content);
|
||||||
|
free(json_content);
|
||||||
|
|
||||||
|
if (result == 0) {
|
||||||
|
last_report_time = now;
|
||||||
|
DEBUG_LOG("Generated kind 34567 monitoring event");
|
||||||
|
} else {
|
||||||
|
DEBUG_ERROR("Failed to generate monitoring event");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Integration: Modify `src/main.c`
|
||||||
|
|
||||||
|
Add monitoring hook to event storage:
|
||||||
|
|
||||||
|
```c
|
||||||
|
// At top of file
|
||||||
|
#include "monitoring.h"
|
||||||
|
|
||||||
|
// In main() function, after init_database()
|
||||||
|
if (init_monitoring_system() != 0) {
|
||||||
|
DEBUG_WARN("Failed to initialize monitoring system");
|
||||||
|
// Continue anyway - monitoring is optional
|
||||||
|
}
|
||||||
|
|
||||||
|
// In store_event() function, after successful storage
|
||||||
|
int store_event(cJSON* event) {
|
||||||
|
// ... existing code ...
|
||||||
|
|
||||||
|
if (rc != SQLITE_DONE) {
|
||||||
|
// ... error handling ...
|
||||||
|
}
|
||||||
|
|
||||||
|
free(tags_json);
|
||||||
|
|
||||||
|
// Trigger monitoring update
|
||||||
|
monitoring_on_event_stored();
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
// In cleanup section of main()
|
||||||
|
cleanup_monitoring_system();
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. Admin API: Enable/Disable Monitoring
|
||||||
|
|
||||||
|
Add admin command to enable monitoring (in `src/dm_admin.c` or `src/api.c`):
|
||||||
|
|
||||||
|
```c
|
||||||
|
// Handle admin command to enable monitoring
|
||||||
|
if (strcmp(command, "enable_monitoring") == 0) {
|
||||||
|
set_monitoring_enabled(1);
|
||||||
|
send_nip17_response(sender_pubkey,
|
||||||
|
"✅ Kind 34567 monitoring enabled",
|
||||||
|
error_msg, sizeof(error_msg));
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle admin command to disable monitoring
|
||||||
|
if (strcmp(command, "disable_monitoring") == 0) {
|
||||||
|
set_monitoring_enabled(0);
|
||||||
|
send_nip17_response(sender_pubkey,
|
||||||
|
"🔴 Kind 34567 monitoring disabled",
|
||||||
|
error_msg, sizeof(error_msg));
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle admin command to set throttle interval
|
||||||
|
if (strncmp(command, "set_monitoring_throttle ", 24) == 0) {
|
||||||
|
int seconds = atoi(command + 24);
|
||||||
|
if (seconds >= 1 && seconds <= 3600) {
|
||||||
|
char value[16];
|
||||||
|
snprintf(value, sizeof(value), "%d", seconds);
|
||||||
|
update_config_in_table("kind_34567_reporting_throttling_sec", value);
|
||||||
|
|
||||||
|
char response[128];
|
||||||
|
snprintf(response, sizeof(response),
|
||||||
|
"✅ Monitoring throttle set to %d seconds", seconds);
|
||||||
|
send_nip17_response(sender_pubkey, response, error_msg, sizeof(error_msg));
|
||||||
|
}
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Frontend Integration
|
||||||
|
|
||||||
|
### Admin Dashboard Subscription
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// When admin logs in to dashboard
|
||||||
|
async function enableMonitoring() {
|
||||||
|
// Send admin command to enable monitoring
|
||||||
|
await sendAdminCommand(['enable_monitoring']);
|
||||||
|
|
||||||
|
// Subscribe to kind 34567 events
|
||||||
|
const subscription = {
|
||||||
|
kinds: [34567],
|
||||||
|
authors: [relayPubkey],
|
||||||
|
"#d": ["event_kinds"]
|
||||||
|
};
|
||||||
|
|
||||||
|
relay.subscribe([subscription], {
|
||||||
|
onevent: (event) => {
|
||||||
|
handleMonitoringEvent(event);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle incoming monitoring events
|
||||||
|
function handleMonitoringEvent(event) {
|
||||||
|
const content = JSON.parse(event.content);
|
||||||
|
|
||||||
|
if (content.data_type === 'event_kinds') {
|
||||||
|
updateEventKindsChart(content.data);
|
||||||
|
updateTotalEventsDisplay(content.data.total_events);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// When admin logs out or closes dashboard
|
||||||
|
async function disableMonitoring() {
|
||||||
|
await sendAdminCommand(['disable_monitoring']);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Display Event Kind Distribution
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
function updateEventKindsChart(data) {
|
||||||
|
const { total_events, distribution } = data;
|
||||||
|
|
||||||
|
// Update total events display
|
||||||
|
document.getElementById('total-events').textContent =
|
||||||
|
total_events.toLocaleString();
|
||||||
|
|
||||||
|
// Update chart/table with distribution
|
||||||
|
const tableBody = document.getElementById('kind-distribution-table');
|
||||||
|
tableBody.innerHTML = '';
|
||||||
|
|
||||||
|
distribution.forEach(item => {
|
||||||
|
const row = document.createElement('tr');
|
||||||
|
row.innerHTML = `
|
||||||
|
<td>Kind ${item.kind}</td>
|
||||||
|
<td>${item.count.toLocaleString()}</td>
|
||||||
|
<td>${item.percentage}%</td>
|
||||||
|
`;
|
||||||
|
tableBody.appendChild(row);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Configuration Migration
|
||||||
|
|
||||||
|
### Add to Schema or Migration Script
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Add monitoring configuration
|
||||||
|
INSERT INTO config (key, value, data_type, description, category) VALUES
|
||||||
|
('kind_34567_reporting_enabled', 'false', 'boolean',
|
||||||
|
'Enable/disable kind 34567 event kind distribution reporting', 'monitoring'),
|
||||||
|
('kind_34567_reporting_throttling_sec', '5', 'integer',
|
||||||
|
'Minimum seconds between kind 34567 reports (throttling)', 'monitoring');
|
||||||
|
```
|
||||||
|
|
||||||
|
Or add to existing config initialization in `src/config.c`.
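If the defaults are seeded from code rather than SQL, a minimal sketch could look like this (`insert_default_config()` is a hypothetical helper standing in for whatever `src/config.c` actually uses to seed rows; it is not part of the existing API):

```c
// Hypothetical helper assumed to INSERT OR IGNORE one row into the config table.
static void insert_default_config(const char* key, const char* value,
                                  const char* type, const char* desc,
                                  const char* category);

// Seed the two monitoring settings with their defaults.
void init_monitoring_config_defaults(void) {
    insert_default_config("kind_34567_reporting_enabled", "false", "boolean",
                          "Enable/disable kind 34567 event kind distribution reporting",
                          "monitoring");
    insert_default_config("kind_34567_reporting_throttling_sec", "5", "integer",
                          "Minimum seconds between kind 34567 reports (throttling)",
                          "monitoring");
}
```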
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Testing
|
||||||
|
|
||||||
|
### 1. Enable Monitoring
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Via admin command (NIP-17 DM)
|
||||||
|
echo '["enable_monitoring"]' | nak event --kind 14 --content - ws://localhost:8888
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Subscribe to Monitoring Events
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Subscribe to kind 34567 events
|
||||||
|
nak req --kinds 34567 --authors <relay_pubkey> ws://localhost:8888
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Generate Events
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Send some test events to trigger monitoring
|
||||||
|
for i in {1..10}; do
|
||||||
|
nak event -c "Test event $i" ws://localhost:8888
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. Verify Monitoring Events
|
||||||
|
|
||||||
|
As new events arrive, you should see at most one kind 34567 event per throttle interval (5 seconds by default) containing the current event kind distribution.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Performance Impact
|
||||||
|
|
||||||
|
### With 3 events/second (relay.damus.io scale)
|
||||||
|
|
||||||
|
**Query execution**:
|
||||||
|
- Frequency: Every 5 seconds (throttled)
|
||||||
|
- Query time: ~700ms (for 1M events)
|
||||||
|
- Overhead: 700ms / 5000ms = 14% (acceptable)
|
||||||
|
|
||||||
|
**Per-event overhead**:
|
||||||
|
- Check if enabled: < 0.01ms
|
||||||
|
- Check throttle: < 0.01ms
|
||||||
|
- Total: < 0.02ms per event (negligible)
|
||||||
|
|
||||||
|
**Overall impact**: < 1% on event processing, 14% on query thread (separate from event processing)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Future Enhancements
|
||||||
|
|
||||||
|
Once this is working, it is easy to add:
|
||||||
|
|
||||||
|
1. **More data types**: Add `d=connections`, `d=subscriptions`, etc.
|
||||||
|
2. **Materialized counters**: Optimize queries for very large databases
|
||||||
|
3. **Historical data**: Store monitoring events for trending
|
||||||
|
4. **Alerts**: Trigger on thresholds (e.g., > 90% capacity)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Summary
|
||||||
|
|
||||||
|
This simplified plan provides:
|
||||||
|
|
||||||
|
✅ **Single data type**: Event kind distribution (includes total events)
|
||||||
|
✅ **Two config variables**: Enable/disable and throttle control
|
||||||
|
✅ **On-demand activation**: Enabled when admin logs in
|
||||||
|
✅ **Simple throttling**: Prevents performance impact
|
||||||
|
✅ **Clean implementation**: ~200 lines of code
|
||||||
|
✅ **Easy to extend**: Add more data types later
|
||||||
|
|
||||||
|
**Estimated implementation time**: 4-6 hours
|
||||||
|
|
||||||
|
**Files to create/modify**:
|
||||||
|
- Create: `src/monitoring.h` (~30 lines)
|
||||||
|
- Create: `src/monitoring.c` (~200 lines)
|
||||||
|
- Modify: `src/main.c` (~10 lines)
|
||||||
|
- Modify: `src/config.c` or migration (~5 lines)
|
||||||
|
- Modify: `src/dm_admin.c` or `src/api.c` (~30 lines)
|
||||||
|
- Create: `api/monitoring.js` (frontend, ~100 lines)
|
||||||
|
|
||||||
|
**Total new code**: ~375 lines
|
||||||
1189 docs/realtime_monitoring_design.md (new file)
File diff suppressed because it is too large
325 docs/relay_traffic_measurement.md (new file)
@@ -0,0 +1,325 @@
# Relay Traffic Measurement Guide
|
||||||
|
|
||||||
|
## Measuring Real-World Relay Traffic
|
||||||
|
|
||||||
|
To validate our performance assumptions, here are commands to measure actual event rates from live relays.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Command: Count Events Over 1 Minute
|
||||||
|
|
||||||
|
### Basic Command
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Count events from relay.damus.io over 60 seconds
|
||||||
|
timeout 60 nak req -s $(date +%s) --stream wss://relay.damus.io | wc -l
|
||||||
|
```
|
||||||
|
|
||||||
|
This will:
|
||||||
|
1. Subscribe to all new events (`-s $(date +%s)` = since now)
|
||||||
|
2. Stream for 60 seconds (`timeout 60`)
|
||||||
|
3. Count the lines (each line = 1 event)
|
||||||
|
|
||||||
|
### With Event Rate Display
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Show events per second in real-time
|
||||||
|
timeout 60 nak req -s $(date +%s) --stream wss://relay.damus.io | \
|
||||||
|
pv -l -i 1 -r > /dev/null
|
||||||
|
```
|
||||||
|
|
||||||
|
This displays:
|
||||||
|
- Total events received
|
||||||
|
- Current rate (events/second)
|
||||||
|
- Average rate
|
||||||
|
|
||||||
|
### With Detailed Statistics
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Count events and calculate statistics
|
||||||
|
echo "Measuring relay traffic for 60 seconds..."
|
||||||
|
START=$(date +%s)
|
||||||
|
COUNT=$(timeout 60 nak req -s $START --stream wss://relay.damus.io | wc -l)
|
||||||
|
END=$(date +%s)
|
||||||
|
DURATION=$((END - START))
|
||||||
|
|
||||||
|
echo "Results:"
|
||||||
|
echo " Total events: $COUNT"
|
||||||
|
echo " Duration: ${DURATION}s"
|
||||||
|
echo " Events/second: $(echo "scale=2; $COUNT / $DURATION" | bc)"
|
||||||
|
echo " Events/minute: $COUNT"
|
||||||
|
```
|
||||||
|
|
||||||
|
### With Event Kind Distribution
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Count events by kind over 60 seconds
|
||||||
|
timeout 60 nak req -s $(date +%s) --stream wss://relay.damus.io | \
|
||||||
|
jq -r '.kind' | \
|
||||||
|
sort | uniq -c | sort -rn
|
||||||
|
```
|
||||||
|
|
||||||
|
Output example:
|
||||||
|
```
|
||||||
|
45 1 # 45 text notes
|
||||||
|
12 3 # 12 contact lists
|
||||||
|
8 7 # 8 reactions
|
||||||
|
3 6 # 3 reposts
|
||||||
|
```
|
||||||
|
|
||||||
|
### With Timestamp Analysis
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Show event timestamps and calculate intervals
|
||||||
|
timeout 60 nak req -s $(date +%s) --stream wss://relay.damus.io | \
|
||||||
|
jq -r '.created_at' | \
|
||||||
|
awk 'NR>1 {print $1-prev} {prev=$1}' | \
|
||||||
|
awk '{sum+=$1; count++} END {
|
||||||
|
print "Average interval:", sum/count, "seconds"
|
||||||
|
print "Events per second:", count/sum
|
||||||
|
}'
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Testing Multiple Relays
|
||||||
|
|
||||||
|
### Compare Traffic Across Relays
|
||||||
|
|
||||||
|
```bash
|
||||||
|
#!/bin/bash
|
||||||
|
# test_relay_traffic.sh
|
||||||
|
|
||||||
|
RELAYS=(
|
||||||
|
"wss://relay.damus.io"
|
||||||
|
"wss://nos.lol"
|
||||||
|
"wss://relay.nostr.band"
|
||||||
|
"wss://nostr.wine"
|
||||||
|
)
|
||||||
|
|
||||||
|
DURATION=60
|
||||||
|
|
||||||
|
echo "Measuring relay traffic for ${DURATION} seconds..."
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
for relay in "${RELAYS[@]}"; do
|
||||||
|
echo "Testing: $relay"
|
||||||
|
count=$(timeout $DURATION nak req -s $(date +%s) --stream "$relay" 2>/dev/null | wc -l)
|
||||||
|
rate=$(echo "scale=2; $count / $DURATION" | bc)
|
||||||
|
echo " Events: $count"
|
||||||
|
echo " Rate: ${rate}/sec"
|
||||||
|
echo ""
|
||||||
|
done
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Expected Results (Based on Real Measurements)
|
||||||
|
|
||||||
|
### relay.damus.io (Large Public Relay)
|
||||||
|
- **Expected rate**: 0.5-2 events/second
|
||||||
|
- **60-second count**: 30-120 events
|
||||||
|
- **Peak times**: Higher during US daytime hours
|
||||||
|
|
||||||
|
### nos.lol (Medium Public Relay)
|
||||||
|
- **Expected rate**: 0.2-0.8 events/second
|
||||||
|
- **60-second count**: 12-48 events
|
||||||
|
|
||||||
|
### Personal/Small Relays
|
||||||
|
- **Expected rate**: 0.01-0.1 events/second
|
||||||
|
- **60-second count**: 1-6 events
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Using Results to Validate Performance Assumptions
|
||||||
|
|
||||||
|
After measuring your relay's traffic:
|
||||||
|
|
||||||
|
1. **Calculate average events/second**:
|
||||||
|
```
|
||||||
|
events_per_second = total_events / 60
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **Estimate query overhead**:
|
||||||
|
```
|
||||||
|
# For 100k event database:
|
||||||
|
query_time = 70ms
|
||||||
|
overhead_percentage = (query_time * events_per_second) / 1000 * 100
|
||||||
|
|
||||||
|
# Example: 0.5 events/sec
|
||||||
|
overhead = (70 * 0.5) / 1000 * 100 = 3.5%
|
||||||
|
```
|
||||||
|
|
||||||
|
3. **Determine if optimization needed**:
|
||||||
|
- < 5% overhead: No optimization needed
|
||||||
|
- 5-20% overhead: Consider 1-second throttling
|
||||||
|
- > 20% overhead: Use materialized counters
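For convenience, the arithmetic above can be wrapped in a small shell snippet (a sketch; the 70ms figure is the same 100k-event estimate used throughout this document):

```bash
#!/bin/bash
# Estimate monitoring query overhead from a measured event rate.
EVENTS_PER_SEC="${1:-0.5}"   # measured rate (events/second)
QUERY_TIME_MS="${2:-70}"     # estimated query time for your database size

OVERHEAD=$(echo "scale=2; $QUERY_TIME_MS * $EVENTS_PER_SEC / 1000 * 100" | bc)
echo "Estimated overhead: ${OVERHEAD}%"

# Rough guidance based on the thresholds above
if (( $(echo "$OVERHEAD < 5" | bc -l) )); then
    echo "No optimization needed"
elif (( $(echo "$OVERHEAD <= 20" | bc -l) )); then
    echo "Consider 1-second throttling"
else
    echo "Use materialized counters"
fi
```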
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Real-Time Monitoring During Development
|
||||||
|
|
||||||
|
### Monitor Your Own Relay
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Watch events in real-time with count
|
||||||
|
nak req -s $(date +%s) --stream ws://localhost:8888 | \
|
||||||
|
awk '{count++; print count, $0}'
|
||||||
|
```
|
||||||
|
|
||||||
|
### Monitor with Event Details
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Show event kind and pubkey for each event
|
||||||
|
nak req -s $(date +%s) --stream ws://localhost:8888 | \
|
||||||
|
jq -r '"[\(.kind)] \(.pubkey[0:8])... \(.content[0:50])"'
|
||||||
|
```
|
||||||
|
|
||||||
|
### Continuous Traffic Monitoring
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Monitor traffic in 10-second windows
|
||||||
|
while true; do
|
||||||
|
echo "=== $(date) ==="
|
||||||
|
count=$(timeout 10 nak req -s $(date +%s) --stream ws://localhost:8888 | wc -l)
|
||||||
|
rate=$(echo "scale=2; $count / 10" | bc)
|
||||||
|
echo "Events: $count (${rate}/sec)"
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Performance Testing Commands
|
||||||
|
|
||||||
|
### Simulate Load
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Send test events to measure query performance
|
||||||
|
for i in {1..100}; do
|
||||||
|
nak event -c "Test event $i" ws://localhost:8888
|
||||||
|
sleep 0.1 # 10 events/second
|
||||||
|
done
|
||||||
|
```
|
||||||
|
|
||||||
|
### Measure Query Response Time
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Time how long queries take with current database
|
||||||
|
time sqlite3 your_relay.db "SELECT COUNT(*) FROM events"
|
||||||
|
time sqlite3 your_relay.db "SELECT kind, COUNT(*) FROM events GROUP BY kind"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Automated Traffic Analysis Script
|
||||||
|
|
||||||
|
Save this as `analyze_relay_traffic.sh`:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
#!/bin/bash
|
||||||
|
# Comprehensive relay traffic analysis
|
||||||
|
|
||||||
|
RELAY="${1:-ws://localhost:8888}"
|
||||||
|
DURATION="${2:-60}"
|
||||||
|
|
||||||
|
echo "Analyzing relay: $RELAY"
|
||||||
|
echo "Duration: ${DURATION} seconds"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Collect events
|
||||||
|
TMPFILE=$(mktemp)
|
||||||
|
timeout $DURATION nak req -s $(date +%s) --stream "$RELAY" > "$TMPFILE" 2>/dev/null
|
||||||
|
|
||||||
|
# Calculate statistics
|
||||||
|
TOTAL=$(wc -l < "$TMPFILE")
|
||||||
|
RATE=$(echo "scale=2; $TOTAL / $DURATION" | bc)
|
||||||
|
|
||||||
|
echo "=== Traffic Statistics ==="
|
||||||
|
echo "Total events: $TOTAL"
|
||||||
|
echo "Events/second: $RATE"
|
||||||
|
echo "Events/minute: $(echo "$TOTAL * 60 / $DURATION" | bc)"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
echo "=== Event Kind Distribution ==="
|
||||||
|
jq -r '.kind' "$TMPFILE" | sort | uniq -c | sort -rn | head -10
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
echo "=== Top Publishers ==="
|
||||||
|
jq -r '.pubkey[0:16]' "$TMPFILE" | sort | uniq -c | sort -rn | head -5
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
echo "=== Performance Estimate ==="
|
||||||
|
echo "For 100k event database:"
|
||||||
|
echo " Query time: ~70ms"
|
||||||
|
echo " Overhead: $(echo "scale=2; 70 * $RATE / 10" | bc)%"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Cleanup
|
||||||
|
rm "$TMPFILE"
|
||||||
|
```
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
```bash
|
||||||
|
chmod +x analyze_relay_traffic.sh
|
||||||
|
./analyze_relay_traffic.sh wss://relay.damus.io 60
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Interpreting Results
|
||||||
|
|
||||||
|
### Low Traffic (< 0.1 events/sec)
|
||||||
|
- **Typical for**: Personal relays, small communities
|
||||||
|
- **Recommendation**: Trigger on every event, no optimization
|
||||||
|
- **Expected overhead**: < 1%
|
||||||
|
|
||||||
|
### Medium Traffic (0.1-0.5 events/sec)
|
||||||
|
- **Typical for**: Medium public relays
|
||||||
|
- **Recommendation**: Trigger on every event, consider throttling if database > 100k
|
||||||
|
- **Expected overhead**: 1-5%
|
||||||
|
|
||||||
|
### High Traffic (0.5-2 events/sec)
|
||||||
|
- **Typical for**: Large public relays
|
||||||
|
- **Recommendation**: Use 1-second throttling
|
||||||
|
- **Expected overhead**: 5-20% without throttling, < 1% with throttling
|
||||||
|
|
||||||
|
### Very High Traffic (> 2 events/sec)
|
||||||
|
- **Typical for**: Major public relays (rare)
|
||||||
|
- **Recommendation**: Use materialized counters
|
||||||
|
- **Expected overhead**: > 20% without optimization
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Continuous Monitoring in Production
|
||||||
|
|
||||||
|
### Add to Relay Startup
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# In your relay startup script
|
||||||
|
echo "Starting traffic monitoring..."
|
||||||
|
nohup bash -c 'while true; do
|
||||||
|
count=$(timeout 60 nak req -s $(date +%s) --stream ws://localhost:8888 2>/dev/null | wc -l)
|
||||||
|
echo "$(date +%Y-%m-%d\ %H:%M:%S) - Events/min: $count" >> traffic.log
|
||||||
|
done' &
|
||||||
|
```
|
||||||
|
|
||||||
|
### Analyze Historical Traffic
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# View traffic trends
|
||||||
|
cat traffic.log | awk '{print $5}' | \
|
||||||
|
awk '{sum+=$1; count++} END {print "Average:", sum/count, "events/min"}'
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Conclusion
|
||||||
|
|
||||||
|
Use these commands to:
|
||||||
|
1. ✅ Measure real-world traffic on your relay
|
||||||
|
2. ✅ Validate performance assumptions
|
||||||
|
3. ✅ Determine if optimization is needed
|
||||||
|
4. ✅ Monitor traffic trends over time
|
||||||
|
|
||||||
|
**Remember**: Most relays will measure < 1 event/second, making the simple "trigger on every event" approach perfectly viable.
|
||||||
630 docs/sql_query_admin_api.md (new file)
@@ -0,0 +1,630 @@
# SQL Query Admin API Design
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
This document describes the design for a general-purpose SQL query interface for the C-Relay admin API. This allows administrators to execute read-only SQL queries against the relay database through cryptographically signed kind 23456 events with NIP-44 encrypted command arrays.
|
||||||
|
|
||||||
|
## Security Model
|
||||||
|
|
||||||
|
### Authentication
|
||||||
|
- All queries must be sent as kind 23456 events with NIP-44 encrypted content
|
||||||
|
- Events must be signed by the admin's private key
|
||||||
|
- Admin pubkey verified against `config.admin_pubkey`
|
||||||
|
- Follows the same authentication pattern as existing admin commands
|
||||||
|
|
||||||
|
### Query Restrictions
|
||||||
|
While authentication is cryptographically secure, we implement defensive safeguards:
|
||||||
|
|
||||||
|
1. **Read-Only Enforcement**
|
||||||
|
- Only SELECT statements allowed
|
||||||
|
- Block: INSERT, UPDATE, DELETE, DROP, CREATE, ALTER, PRAGMA (write operations)
|
||||||
|
- Allow: SELECT, WITH (for CTEs)
|
||||||
|
|
||||||
|
2. **Resource Limits**
|
||||||
|
- Query timeout: 5 seconds (configurable)
|
||||||
|
- Result row limit: 1000 rows (configurable)
|
||||||
|
- Result size limit: 1MB (configurable)
|
||||||
|
|
||||||
|
3. **Query Logging**
|
||||||
|
- All queries logged with timestamp, admin pubkey, execution time
|
||||||
|
- Failed queries logged with error message
|
||||||
|
|
||||||
|
## Command Format
|
||||||
|
|
||||||
|
### Admin Event Structure (Kind 23456)
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"id": "event_id",
|
||||||
|
"pubkey": "admin_public_key",
|
||||||
|
"created_at": 1234567890,
|
||||||
|
"kind": 23456,
|
||||||
|
"content": "AqHBUgcM7dXFYLQuDVzGwMST1G8jtWYyVvYxXhVGEu4nAb4LVw...",
|
||||||
|
"tags": [
|
||||||
|
["p", "relay_public_key"]
|
||||||
|
],
|
||||||
|
"sig": "event_signature"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
The `content` field contains a NIP-44 encrypted JSON array:
|
||||||
|
```json
|
||||||
|
["sql_query", "SELECT * FROM events LIMIT 10"]
|
||||||
|
```
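For illustration only, an admin client could assemble the request roughly as follows; `nip44Encrypt()` and `signEvent()` are hypothetical placeholders for whatever NIP-44 and signing helpers the dashboard uses, not functions defined by this design:

```javascript
// Sketch: build a kind 23456 admin request carrying an encrypted sql_query command.
async function buildSqlQueryEvent(adminPrivkey, adminPubkey, relayPubkey, sql) {
    const command = JSON.stringify(["sql_query", sql]);

    // Assumed helpers:
    //   nip44Encrypt(privkey, peerPubkey, plaintext) -> ciphertext string
    //   signEvent(event, privkey) -> event with id and sig filled in
    const content = await nip44Encrypt(adminPrivkey, relayPubkey, command);

    const event = {
        pubkey: adminPubkey,
        created_at: Math.floor(Date.now() / 1000),
        kind: 23456,
        tags: [["p", relayPubkey]],
        content: content
    };

    return signEvent(event, adminPrivkey);
}
```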
|
||||||
|
|
||||||
|
### Response Format (Kind 23457)
|
||||||
|
```json
|
||||||
|
["EVENT", "temp_sub_id", {
|
||||||
|
"id": "response_event_id",
|
||||||
|
"pubkey": "relay_public_key",
|
||||||
|
"created_at": 1234567890,
|
||||||
|
"kind": 23457,
|
||||||
|
"content": "nip44_encrypted_content",
|
||||||
|
"tags": [
|
||||||
|
["p", "admin_public_key"],
|
||||||
|
["e", "request_event_id"]
|
||||||
|
],
|
||||||
|
"sig": "response_event_signature"
|
||||||
|
}]
|
||||||
|
```
|
||||||
|
|
||||||
|
The `content` field contains NIP-44 encrypted JSON:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"query_type": "sql_query",
|
||||||
|
"request_id": "request_event_id",
|
||||||
|
"timestamp": 1234567890,
|
||||||
|
"query": "SELECT * FROM events LIMIT 10",
|
||||||
|
"execution_time_ms": 45,
|
||||||
|
"row_count": 10,
|
||||||
|
"columns": ["id", "pubkey", "created_at", "kind", "content"],
|
||||||
|
"rows": [
|
||||||
|
["abc123...", "def456...", 1234567890, 1, "Hello world"],
|
||||||
|
...
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Note:** The response includes the request event ID in two places:
|
||||||
|
1. **In tags**: `["e", "request_event_id"]` - Standard Nostr convention for event references
|
||||||
|
2. **In content**: `"request_id": "request_event_id"` - For easy access after decryption
|
||||||
|
|
||||||
|
### Error Response Format (Kind 23457)
|
||||||
|
```json
|
||||||
|
["EVENT", "temp_sub_id", {
|
||||||
|
"id": "response_event_id",
|
||||||
|
"pubkey": "relay_public_key",
|
||||||
|
"created_at": 1234567890,
|
||||||
|
"kind": 23457,
|
||||||
|
"content": "nip44_encrypted_content",
|
||||||
|
"tags": [
|
||||||
|
["p", "admin_public_key"],
|
||||||
|
["e", "request_event_id"]
|
||||||
|
],
|
||||||
|
"sig": "response_event_signature"
|
||||||
|
}]
|
||||||
|
```
|
||||||
|
|
||||||
|
The `content` field contains NIP-44 encrypted JSON:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"query_type": "sql_query",
|
||||||
|
"request_id": "request_event_id",
|
||||||
|
"timestamp": 1234567890,
|
||||||
|
"query": "DELETE FROM events",
|
||||||
|
"status": "error",
|
||||||
|
"error": "Query blocked: DELETE statements not allowed",
|
||||||
|
"error_type": "blocked_statement"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Available Database Tables and Views
|
||||||
|
|
||||||
|
### Core Tables
|
||||||
|
- **events** - All Nostr events (id, pubkey, created_at, kind, content, tags, sig)
|
||||||
|
- **config** - Configuration key-value pairs
|
||||||
|
- **auth_rules** - Authentication and authorization rules
|
||||||
|
- **subscription_events** - Subscription lifecycle events
|
||||||
|
- **event_broadcasts** - Event broadcast log
|
||||||
|
|
||||||
|
### Useful Views
|
||||||
|
- **recent_events** - Last 1000 events
|
||||||
|
- **event_stats** - Event statistics by type
|
||||||
|
- **configuration_events** - Kind 33334 configuration events
|
||||||
|
- **subscription_analytics** - Subscription metrics by date
|
||||||
|
- **active_subscriptions_log** - Currently active subscriptions
|
||||||
|
- **event_kinds_view** - Event distribution by kind
|
||||||
|
- **top_pubkeys_view** - Top 10 pubkeys by event count
|
||||||
|
- **time_stats_view** - Time-based statistics (24h, 7d, 30d)
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
### Backend (dm_admin.c)
|
||||||
|
|
||||||
|
#### 1. Query Validation Function
|
||||||
|
```c
|
||||||
|
int validate_sql_query(const char* query, char* error_msg, size_t error_size);
|
||||||
|
```
|
||||||
|
- Check for blocked keywords (case-insensitive)
|
||||||
|
- Validate query syntax (basic checks)
|
||||||
|
- Return 0 on success, -1 on failure
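A minimal keyword-based implementation of this check might look like the following sketch. It uses whole-word matching so column names like `created_at` do not trip the `CREATE` filter, but it does not parse string literals, so it stays deliberately conservative; headers (`string.h`, `ctype.h`, `stdio.h`) are assumed:

```c
static int is_ident_char(unsigned char c) {
    return isalnum(c) || c == '_';
}

// Case-insensitive whole-word search.
static int contains_word(const char* haystack, const char* word) {
    size_t wlen = strlen(word);
    for (const char* p = haystack; *p; p++) {
        if (strncasecmp(p, word, wlen) == 0 &&
            (p == haystack || !is_ident_char((unsigned char)p[-1])) &&
            !is_ident_char((unsigned char)p[wlen])) {
            return 1;
        }
    }
    return 0;
}

int validate_sql_query(const char* query, char* error_msg, size_t error_size) {
    static const char* blocked[] = {
        "INSERT", "UPDATE", "DELETE", "DROP", "CREATE",
        "ALTER", "PRAGMA", "ATTACH", "DETACH", "REPLACE", "VACUUM"
    };

    // Skip leading whitespace, then require a SELECT or WITH statement
    while (*query && isspace((unsigned char)*query)) query++;
    if (strncasecmp(query, "SELECT", 6) != 0 && strncasecmp(query, "WITH", 4) != 0) {
        snprintf(error_msg, error_size, "Query blocked: only SELECT statements allowed");
        return -1;
    }

    // Reject any blocked keyword appearing as a whole word anywhere in the query
    for (size_t i = 0; i < sizeof(blocked) / sizeof(blocked[0]); i++) {
        if (contains_word(query, blocked[i])) {
            snprintf(error_msg, error_size, "Query blocked: %s statements not allowed", blocked[i]);
            return -1;
        }
    }
    return 0;
}
```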
|
||||||
|
|
||||||
|
#### 2. Query Execution Function
|
||||||
|
```c
|
||||||
|
char* execute_sql_query(const char* query, const char* request_id, char* error_msg, size_t error_size);
|
||||||
|
```
|
||||||
|
- Enforce the query timeout (note: sqlite3_busy_timeout() only covers lock contention; limiting long-running queries needs sqlite3_progress_handler() or sqlite3_interrupt())
|
||||||
|
- Execute query with row/size limits
|
||||||
|
- Build JSON response with results
|
||||||
|
- Log query execution
|
||||||
|
- Return JSON string or NULL on error
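A sketch of the execution path is shown below, using the four-argument form called from `handle_sql_query_unified()` later in this document. It assumes the `g_db` SQLite handle and the headers already used elsewhere in the codebase; row/size limits, timeout enforcement, and query logging are elided, and all column values are returned as text strings for simplicity:

```c
char* execute_sql_query(const char* query, const char* request_id,
                        char* error_msg, size_t error_size) {
    sqlite3_stmt* stmt = NULL;
    if (sqlite3_prepare_v2(g_db, query, -1, &stmt, NULL) != SQLITE_OK) {
        snprintf(error_msg, error_size, "SQL error: %s", sqlite3_errmsg(g_db));
        return NULL;
    }

    struct timespec start, end;
    clock_gettime(CLOCK_MONOTONIC, &start);

    cJSON* response = cJSON_CreateObject();
    cJSON_AddStringToObject(response, "query_type", "sql_query");
    cJSON_AddStringToObject(response, "request_id", request_id);
    cJSON_AddNumberToObject(response, "timestamp", (double)time(NULL));
    cJSON_AddStringToObject(response, "query", query);

    // Column names
    int ncols = sqlite3_column_count(stmt);
    cJSON* columns = cJSON_CreateArray();
    for (int i = 0; i < ncols; i++) {
        cJSON_AddItemToArray(columns, cJSON_CreateString(sqlite3_column_name(stmt, i)));
    }
    cJSON_AddItemToObject(response, "columns", columns);

    // Rows (the real implementation would honor sql_query_row_limit)
    cJSON* rows = cJSON_CreateArray();
    int row_count = 0;
    while (sqlite3_step(stmt) == SQLITE_ROW && row_count < 1000) {
        cJSON* row = cJSON_CreateArray();
        for (int i = 0; i < ncols; i++) {
            const unsigned char* text = sqlite3_column_text(stmt, i);
            cJSON_AddItemToArray(row, text ? cJSON_CreateString((const char*)text)
                                           : cJSON_CreateNull());
        }
        cJSON_AddItemToArray(rows, row);
        row_count++;
    }
    sqlite3_finalize(stmt);

    cJSON_AddItemToObject(response, "rows", rows);
    cJSON_AddNumberToObject(response, "row_count", row_count);

    clock_gettime(CLOCK_MONOTONIC, &end);
    double ms = (end.tv_sec - start.tv_sec) * 1000.0 +
                (end.tv_nsec - start.tv_nsec) / 1000000.0;
    cJSON_AddNumberToObject(response, "execution_time_ms", ms);

    char* json = cJSON_PrintUnformatted(response);
    cJSON_Delete(response);
    return json;
}
```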
|
||||||
|
|
||||||
|
#### 3. Command Handler Integration
|
||||||
|
Add to `process_dm_admin_command()` in [`dm_admin.c`](src/dm_admin.c:131):
|
||||||
|
```c
|
||||||
|
else if (strcmp(command_type, "sql_query") == 0) {
|
||||||
|
const char* query = get_tag_value(event, "sql_query", 1);
|
||||||
|
if (!query) {
|
||||||
|
DEBUG_ERROR("DM Admin: Missing sql_query parameter");
|
||||||
|
snprintf(error_message, error_size, "invalid: missing SQL query");
|
||||||
|
} else {
|
||||||
|
result = handle_sql_query_unified(event, query, error_message, error_size, wsi);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Add unified handler function:
|
||||||
|
```c
|
||||||
|
int handle_sql_query_unified(cJSON* event, const char* query,
|
||||||
|
char* error_message, size_t error_size,
|
||||||
|
struct lws* wsi) {
|
||||||
|
// Get request event ID for response correlation
|
||||||
|
cJSON* request_id_obj = cJSON_GetObjectItem(event, "id");
|
||||||
|
if (!request_id_obj || !cJSON_IsString(request_id_obj)) {
|
||||||
|
snprintf(error_message, error_size, "Missing request event ID");
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
const char* request_id = cJSON_GetStringValue(request_id_obj);
|
||||||
|
|
||||||
|
// Validate query
|
||||||
|
if (validate_sql_query(query, error_message, error_size) != 0) {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Execute query and include request_id in result
|
||||||
|
char* result_json = execute_sql_query(query, request_id, error_message, error_size);
|
||||||
|
if (!result_json) {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send response as kind 23457 event with request ID in tags
|
||||||
|
cJSON* sender_pubkey_obj = cJSON_GetObjectItem(event, "pubkey");
|
||||||
|
if (!sender_pubkey_obj || !cJSON_IsString(sender_pubkey_obj)) {
|
||||||
|
free(result_json);
|
||||||
|
snprintf(error_message, error_size, "Missing sender pubkey");
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
const char* sender_pubkey = cJSON_GetStringValue(sender_pubkey_obj);
|
||||||
|
int send_result = send_admin_response(sender_pubkey, result_json, request_id,
|
||||||
|
error_message, error_size, wsi);
|
||||||
|
free(result_json);
|
||||||
|
|
||||||
|
return send_result;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Frontend (api/index.html)
|
||||||
|
|
||||||
|
#### SQL Query Section UI
|
||||||
|
Add to [`api/index.html`](api/index.html:1):
|
||||||
|
```html
|
||||||
|
<section id="sql-query-section" class="admin-section">
|
||||||
|
<h2>SQL Query Console</h2>
|
||||||
|
|
||||||
|
<div class="query-selector">
|
||||||
|
<label for="query-dropdown">Quick Queries & History:</label>
|
||||||
|
<select id="query-dropdown" onchange="loadSelectedQuery()">
|
||||||
|
<option value="">-- Select a query --</option>
|
||||||
|
<optgroup label="Common Queries">
|
||||||
|
<option value="recent_events">Recent Events</option>
|
||||||
|
<option value="event_stats">Event Statistics</option>
|
||||||
|
<option value="subscriptions">Active Subscriptions</option>
|
||||||
|
<option value="top_pubkeys">Top Pubkeys</option>
|
||||||
|
<option value="event_kinds">Event Kinds Distribution</option>
|
||||||
|
<option value="time_stats">Time-based Statistics</option>
|
||||||
|
</optgroup>
|
||||||
|
<optgroup label="Query History" id="history-group">
|
||||||
|
<!-- Dynamically populated from localStorage -->
|
||||||
|
</optgroup>
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="query-editor">
|
||||||
|
<label for="sql-input">SQL Query:</label>
|
||||||
|
<textarea id="sql-input" rows="5" placeholder="SELECT * FROM events LIMIT 10"></textarea>
|
||||||
|
<div class="query-actions">
|
||||||
|
<button onclick="executeSqlQuery()" class="primary-button">Execute Query</button>
|
||||||
|
<button onclick="clearSqlQuery()">Clear</button>
|
||||||
|
<button onclick="clearQueryHistory()" class="danger-button">Clear History</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="query-results">
|
||||||
|
<h3>Results</h3>
|
||||||
|
<div id="query-info" class="info-box"></div>
|
||||||
|
<div id="query-table" class="table-container"></div>
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
```
|
||||||
|
|
||||||
|
#### JavaScript Functions (api/index.js)
|
||||||
|
Add to [`api/index.js`](api/index.js:1):
|
||||||
|
```javascript
|
||||||
|
// Predefined query templates
|
||||||
|
const SQL_QUERY_TEMPLATES = {
|
||||||
|
recent_events: "SELECT id, pubkey, created_at, kind, substr(content, 1, 50) as content FROM events ORDER BY created_at DESC LIMIT 20",
|
||||||
|
event_stats: "SELECT * FROM event_stats",
|
||||||
|
subscriptions: "SELECT * FROM active_subscriptions_log ORDER BY created_at DESC",
|
||||||
|
top_pubkeys: "SELECT * FROM top_pubkeys_view",
|
||||||
|
event_kinds: "SELECT * FROM event_kinds_view ORDER BY count DESC",
|
||||||
|
time_stats: "SELECT * FROM time_stats_view"
|
||||||
|
};
|
||||||
|
|
||||||
|
// Query history management (localStorage)
|
||||||
|
const QUERY_HISTORY_KEY = 'c_relay_sql_history';
|
||||||
|
const MAX_HISTORY_ITEMS = 20;
|
||||||
|
|
||||||
|
// Load query history from localStorage
|
||||||
|
function loadQueryHistory() {
|
||||||
|
try {
|
||||||
|
const history = localStorage.getItem(QUERY_HISTORY_KEY);
|
||||||
|
return history ? JSON.parse(history) : [];
|
||||||
|
} catch (e) {
|
||||||
|
console.error('Failed to load query history:', e);
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save query to history
|
||||||
|
function saveQueryToHistory(query) {
|
||||||
|
if (!query || query.trim().length === 0) return;
|
||||||
|
|
||||||
|
try {
|
||||||
|
let history = loadQueryHistory();
|
||||||
|
|
||||||
|
// Remove duplicate if exists
|
||||||
|
history = history.filter(q => q !== query);
|
||||||
|
|
||||||
|
// Add to beginning
|
||||||
|
history.unshift(query);
|
||||||
|
|
||||||
|
// Limit size
|
||||||
|
if (history.length > MAX_HISTORY_ITEMS) {
|
||||||
|
history = history.slice(0, MAX_HISTORY_ITEMS);
|
||||||
|
}
|
||||||
|
|
||||||
|
localStorage.setItem(QUERY_HISTORY_KEY, JSON.stringify(history));
|
||||||
|
updateQueryDropdown();
|
||||||
|
} catch (e) {
|
||||||
|
console.error('Failed to save query history:', e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clear query history
|
||||||
|
function clearQueryHistory() {
|
||||||
|
if (confirm('Clear all query history?')) {
|
||||||
|
localStorage.removeItem(QUERY_HISTORY_KEY);
|
||||||
|
updateQueryDropdown();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update dropdown with history
|
||||||
|
function updateQueryDropdown() {
|
||||||
|
const historyGroup = document.getElementById('history-group');
|
||||||
|
if (!historyGroup) return;
|
||||||
|
|
||||||
|
// Clear existing history options
|
||||||
|
historyGroup.innerHTML = '';
|
||||||
|
|
||||||
|
const history = loadQueryHistory();
|
||||||
|
if (history.length === 0) {
|
||||||
|
const option = document.createElement('option');
|
||||||
|
option.value = '';
|
||||||
|
option.textContent = '(no history)';
|
||||||
|
option.disabled = true;
|
||||||
|
historyGroup.appendChild(option);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
history.forEach((query, index) => {
|
||||||
|
const option = document.createElement('option');
|
||||||
|
option.value = `history_${index}`;
|
||||||
|
// Truncate long queries for display
|
||||||
|
const displayQuery = query.length > 60 ? query.substring(0, 60) + '...' : query;
|
||||||
|
option.textContent = displayQuery;
|
||||||
|
option.dataset.query = query;
|
||||||
|
historyGroup.appendChild(option);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load selected query from dropdown
|
||||||
|
function loadSelectedQuery() {
|
||||||
|
const dropdown = document.getElementById('query-dropdown');
|
||||||
|
const selectedValue = dropdown.value;
|
||||||
|
|
||||||
|
if (!selectedValue) return;
|
||||||
|
|
||||||
|
let query = '';
|
||||||
|
|
||||||
|
// Check if it's a template
|
||||||
|
if (SQL_QUERY_TEMPLATES[selectedValue]) {
|
||||||
|
query = SQL_QUERY_TEMPLATES[selectedValue];
|
||||||
|
}
|
||||||
|
// Check if it's from history
|
||||||
|
else if (selectedValue.startsWith('history_')) {
|
||||||
|
const selectedOption = dropdown.options[dropdown.selectedIndex];
|
||||||
|
query = selectedOption.dataset.query;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (query) {
|
||||||
|
document.getElementById('sql-input').value = query;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Reset dropdown to placeholder
|
||||||
|
dropdown.value = '';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize query history on page load
|
||||||
|
document.addEventListener('DOMContentLoaded', function() {
|
||||||
|
updateQueryDropdown();
|
||||||
|
});
|
||||||
|
|
||||||
|
// Clear the SQL query input
|
||||||
|
function clearSqlQuery() {
|
||||||
|
document.getElementById('sql-input').value = '';
|
||||||
|
document.getElementById('query-info').innerHTML = '';
|
||||||
|
document.getElementById('query-table').innerHTML = '';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Track pending SQL queries by request ID
|
||||||
|
const pendingSqlQueries = new Map();
|
||||||
|
|
||||||
|
// Execute SQL query via admin API
|
||||||
|
async function executeSqlQuery() {
|
||||||
|
const query = document.getElementById('sql-input').value;
|
||||||
|
if (!query.trim()) {
|
||||||
|
showError('Please enter a SQL query');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Show loading state
|
||||||
|
document.getElementById('query-info').innerHTML = '<div class="loading">Executing query...</div>';
|
||||||
|
document.getElementById('query-table').innerHTML = '';
|
||||||
|
|
||||||
|
// Save to history (before execution, so it's saved even if query fails)
|
||||||
|
saveQueryToHistory(query.trim());
|
||||||
|
|
||||||
|
// Send query as kind 23456 admin command
|
||||||
|
const command = ["sql_query", query];
|
||||||
|
const requestEvent = await sendAdminCommand(command);
|
||||||
|
|
||||||
|
// Store query info for when response arrives
|
||||||
|
if (requestEvent && requestEvent.id) {
|
||||||
|
pendingSqlQueries.set(requestEvent.id, {
|
||||||
|
query: query,
|
||||||
|
timestamp: Date.now()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Note: Response will be handled by the event listener
|
||||||
|
// which will call displaySqlQueryResults() when response arrives
|
||||||
|
} catch (error) {
|
||||||
|
showError('Failed to execute query: ' + error.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle SQL query response (called by event listener)
|
||||||
|
function handleSqlQueryResponse(response) {
|
||||||
|
// Check if this is a response to one of our queries
|
||||||
|
if (response.request_id && pendingSqlQueries.has(response.request_id)) {
|
||||||
|
const queryInfo = pendingSqlQueries.get(response.request_id);
|
||||||
|
pendingSqlQueries.delete(response.request_id);
|
||||||
|
|
||||||
|
// Display results
|
||||||
|
displaySqlQueryResults(response);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Display SQL query results
|
||||||
|
function displaySqlQueryResults(response) {
|
||||||
|
const infoDiv = document.getElementById('query-info');
|
||||||
|
const tableDiv = document.getElementById('query-table');
|
||||||
|
|
||||||
|
if (response.status === 'error' || response.error) {
|
||||||
|
infoDiv.innerHTML = `<div class="error-message">❌ ${response.error || 'Query failed'}</div>`;
|
||||||
|
tableDiv.innerHTML = '';
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Show query info with request ID for debugging
|
||||||
|
const rowCount = response.row_count || 0;
|
||||||
|
const execTime = response.execution_time_ms || 0;
|
||||||
|
const requestId = response.request_id ? response.request_id.substring(0, 8) + '...' : 'unknown';
|
||||||
|
infoDiv.innerHTML = `
|
||||||
|
<div class="query-info-success">
|
||||||
|
<span>✅ Query executed successfully</span>
|
||||||
|
<span>Rows: ${rowCount}</span>
|
||||||
|
<span>Execution Time: ${execTime}ms</span>
|
||||||
|
<span class="request-id" title="${response.request_id || ''}">Request: ${requestId}</span>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
|
||||||
|
// Build results table
|
||||||
|
if (response.rows && response.rows.length > 0) {
|
||||||
|
let html = '<table class="sql-results-table"><thead><tr>';
|
||||||
|
response.columns.forEach(col => {
|
||||||
|
html += `<th>${escapeHtml(col)}</th>`;
|
||||||
|
});
|
||||||
|
html += '</tr></thead><tbody>';
|
||||||
|
|
||||||
|
response.rows.forEach(row => {
|
||||||
|
html += '<tr>';
|
||||||
|
row.forEach(cell => {
|
||||||
|
const cellValue = cell === null ? '<em>NULL</em>' : escapeHtml(String(cell));
|
||||||
|
html += `<td>${cellValue}</td>`;
|
||||||
|
});
|
||||||
|
html += '</tr>';
|
||||||
|
});
|
||||||
|
|
||||||
|
html += '</tbody></table>';
|
||||||
|
tableDiv.innerHTML = html;
|
||||||
|
} else {
|
||||||
|
tableDiv.innerHTML = '<p class="no-results">No results returned</p>';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper function to escape HTML
|
||||||
|
function escapeHtml(text) {
|
||||||
|
const div = document.createElement('div');
|
||||||
|
div.textContent = text;
|
||||||
|
return div.innerHTML;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Example Queries
|
||||||
|
|
||||||
|
### Subscription Statistics
|
||||||
|
```sql
|
||||||
|
SELECT
|
||||||
|
date,
|
||||||
|
subscriptions_created,
|
||||||
|
subscriptions_ended,
|
||||||
|
avg_duration_seconds,
|
||||||
|
unique_clients
|
||||||
|
FROM subscription_analytics
|
||||||
|
ORDER BY date DESC
|
||||||
|
LIMIT 7;
|
||||||
|
```
|
||||||
|
|
||||||
|
### Event Distribution by Kind
|
||||||
|
```sql
|
||||||
|
SELECT kind, count, percentage
|
||||||
|
FROM event_kinds_view
|
||||||
|
ORDER BY count DESC;
|
||||||
|
```
|
||||||
|
|
||||||
|
### Recent Events by Specific Pubkey
|
||||||
|
```sql
|
||||||
|
SELECT id, created_at, kind, content
|
||||||
|
FROM events
|
||||||
|
WHERE pubkey = 'abc123...'
|
||||||
|
ORDER BY created_at DESC
|
||||||
|
LIMIT 20;
|
||||||
|
```
|
||||||
|
|
||||||
|
### Active Subscriptions with Details
|
||||||
|
```sql
|
||||||
|
SELECT
|
||||||
|
subscription_id,
|
||||||
|
client_ip,
|
||||||
|
events_sent,
|
||||||
|
duration_seconds,
|
||||||
|
filter_json
|
||||||
|
FROM active_subscriptions_log
|
||||||
|
ORDER BY created_at DESC;
|
||||||
|
```
|
||||||
|
|
||||||
|
### Database Size and Event Count
|
||||||
|
```sql
|
||||||
|
SELECT
|
||||||
|
(SELECT COUNT(*) FROM events) as total_events,
|
||||||
|
(SELECT COUNT(*) FROM subscription_events) as total_subscriptions,
|
||||||
|
(SELECT COUNT(*) FROM auth_rules WHERE active = 1) as active_rules;
|
||||||
|
```
|
||||||
|
|
||||||
|
## Configuration Options
|
||||||
|
|
||||||
|
Add to config table:
|
||||||
|
```sql
|
||||||
|
INSERT INTO config (key, value, data_type, description, category) VALUES
|
||||||
|
('sql_query_enabled', 'true', 'boolean', 'Enable SQL query admin API', 'admin'),
|
||||||
|
('sql_query_timeout', '5', 'integer', 'Query timeout in seconds', 'admin'),
|
||||||
|
('sql_query_row_limit', '1000', 'integer', 'Maximum rows per query', 'admin'),
|
||||||
|
('sql_query_size_limit', '1048576', 'integer', 'Maximum result size in bytes', 'admin'),
|
||||||
|
('sql_query_log_enabled', 'true', 'boolean', 'Log all SQL queries', 'admin');
|
||||||
|
```
|
||||||
|
|
||||||
|
## Security Considerations
|
||||||
|
|
||||||
|
### What This Protects Against
|
||||||
|
1. **Unauthorized Access** - Only admin can execute queries (cryptographic verification)
|
||||||
|
2. **Data Modification** - Read-only enforcement prevents accidental/malicious changes
|
||||||
|
3. **Resource Exhaustion** - Timeouts and limits prevent DoS
|
||||||
|
4. **Audit Trail** - All queries logged for security review
|
||||||
|
|
||||||
|
### What This Does NOT Protect Against
|
||||||
|
1. **Admin Compromise** - If admin private key is stolen, attacker has full read access
|
||||||
|
2. **Information Disclosure** - Admin can read all data (by design)
|
||||||
|
3. **Complex Attacks** - Sophisticated SQL injection might bypass simple keyword blocking
|
||||||
|
|
||||||
|
### Recommendations
|
||||||
|
1. **Secure Admin Key** - Store admin private key securely, never commit to git
|
||||||
|
2. **Monitor Query Logs** - Review query logs regularly for suspicious activity
|
||||||
|
3. **Backup Database** - Regular backups in case of issues
|
||||||
|
4. **Test Queries** - Test complex queries on development relay first
|
||||||
|
|
||||||
|
## Testing Plan
|
||||||
|
|
||||||
|
### Unit Tests
|
||||||
|
1. Query validation (blocked keywords, syntax)
|
||||||
|
2. Result formatting (JSON structure)
|
||||||
|
3. Error handling (timeouts, limits)
|
||||||
|
|
||||||
|
### Integration Tests
|
||||||
|
1. Execute queries through NIP-17 DM
|
||||||
|
2. Verify authentication (admin vs non-admin)
|
||||||
|
3. Test resource limits (timeout, row limit)
|
||||||
|
4. Test error responses
|
||||||
|
|
||||||
|
### Security Tests
|
||||||
|
1. Attempt blocked statements (INSERT, DELETE, etc.)
|
||||||
|
2. Attempt SQL injection patterns
|
||||||
|
3. Test query timeout with slow queries
|
||||||
|
4. Test row limit with large result sets
|
||||||
|
|
||||||
|
## Future Enhancements
|
||||||
|
|
||||||
|
1. **Query History** - Store recent queries for quick re-execution
|
||||||
|
2. **Query Favorites** - Save frequently used queries
|
||||||
|
3. **Export Results** - Download results as CSV/JSON
|
||||||
|
4. **Query Builder** - Visual query builder for common operations
|
||||||
|
5. **Real-time Updates** - WebSocket updates for live data
|
||||||
|
6. **Query Sharing** - Share queries with other admins (if multi-admin support added)
|
||||||
|
|
||||||
|
## Migration Path
|
||||||
|
|
||||||
|
### Phase 1: Backend Implementation
|
||||||
|
1. Add query validation function
|
||||||
|
2. Add query execution function
|
||||||
|
3. Integrate with NIP-17 command handler
|
||||||
|
4. Add configuration options
|
||||||
|
5. Add query logging
|
||||||
|
|
||||||
|
### Phase 2: Frontend Implementation
|
||||||
|
1. Add SQL query section to index.html
|
||||||
|
2. Add query execution JavaScript
|
||||||
|
3. Add predefined query templates
|
||||||
|
4. Add results display formatting
|
||||||
|
|
||||||
|
### Phase 3: Testing and Documentation
|
||||||
|
1. Write unit tests
|
||||||
|
2. Write integration tests
|
||||||
|
3. Update user documentation
|
||||||
|
4. Create query examples guide
|
||||||
|
|
||||||
|
### Phase 4: Enhancement
|
||||||
|
1. Add query history
|
||||||
|
2. Add export functionality
|
||||||
|
3. Optimize performance
|
||||||
|
4. Add more predefined templates
|
||||||
258 docs/sql_test_design.md (new file)
@@ -0,0 +1,258 @@
# SQL Query Test Script Design
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
Test script for validating the SQL query admin API functionality. Tests query validation, execution, error handling, and security features.
|
||||||
|
|
||||||
|
## Script: tests/sql_test.sh
|
||||||
|
|
||||||
|
### Test Categories
|
||||||
|
|
||||||
|
#### 1. Query Validation Tests
|
||||||
|
- ✅ Valid SELECT queries accepted
|
||||||
|
- ❌ INSERT statements blocked
|
||||||
|
- ❌ UPDATE statements blocked
|
||||||
|
- ❌ DELETE statements blocked
|
||||||
|
- ❌ DROP statements blocked
|
||||||
|
- ❌ CREATE statements blocked
|
||||||
|
- ❌ ALTER statements blocked
|
||||||
|
- ❌ PRAGMA write operations blocked
|
||||||
|
|
||||||
|
#### 2. Query Execution Tests
|
||||||
|
- ✅ Simple SELECT query
|
||||||
|
- ✅ SELECT with WHERE clause
|
||||||
|
- ✅ SELECT with JOIN
|
||||||
|
- ✅ SELECT with ORDER BY and LIMIT
|
||||||
|
- ✅ Query against views
|
||||||
|
- ✅ Query with aggregate functions (COUNT, SUM, AVG)
|
||||||
|
|
||||||
|
#### 3. Response Format Tests
|
||||||
|
- ✅ Response includes request_id
|
||||||
|
- ✅ Response includes query_type
|
||||||
|
- ✅ Response includes columns array
|
||||||
|
- ✅ Response includes rows array
|
||||||
|
- ✅ Response includes row_count
|
||||||
|
- ✅ Response includes execution_time_ms
|
||||||
|
|
||||||
|
#### 4. Error Handling Tests
|
||||||
|
- ❌ Invalid SQL syntax
|
||||||
|
- ❌ Non-existent table
|
||||||
|
- ❌ Non-existent column
|
||||||
|
- ❌ Query timeout (if configurable)
|
||||||
|
|
||||||
|
#### 5. Security Tests
|
||||||
|
- ❌ SQL injection attempts blocked
|
||||||
|
- ❌ Nested query attacks blocked
|
||||||
|
- ❌ Comment-based attacks blocked
|
||||||
|
|
||||||
|
#### 6. Concurrent Query Tests
|
||||||
|
- ✅ Multiple queries in parallel
|
||||||
|
- ✅ Responses correctly correlated to requests
|
||||||
|
|
||||||
|
## Script Structure
|
||||||
|
|
||||||
|
```bash
|
||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# SQL Query Admin API Test Script
|
||||||
|
# Tests the sql_query command functionality
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
RELAY_URL="${RELAY_URL:-ws://localhost:8888}"
|
||||||
|
ADMIN_PRIVKEY="${ADMIN_PRIVKEY:-}"
|
||||||
|
RELAY_PUBKEY="${RELAY_PUBKEY:-}"
|
||||||
|
|
||||||
|
# Colors for output
|
||||||
|
RED='\033[0;31m'
|
||||||
|
GREEN='\033[0;32m'
|
||||||
|
YELLOW='\033[1;33m'
|
||||||
|
NC='\033[0m' # No Color
|
||||||
|
|
||||||
|
# Test counters
|
||||||
|
TESTS_RUN=0
|
||||||
|
TESTS_PASSED=0
|
||||||
|
TESTS_FAILED=0
|
||||||
|
|
||||||
|
# Helper functions
|
||||||
|
print_test() {
|
||||||
|
echo -e "${YELLOW}TEST: $1${NC}"
|
||||||
|
TESTS_RUN=$((TESTS_RUN + 1))
|
||||||
|
}
|
||||||
|
|
||||||
|
print_pass() {
|
||||||
|
echo -e "${GREEN}✓ PASS: $1${NC}"
|
||||||
|
TESTS_PASSED=$((TESTS_PASSED + 1))
|
||||||
|
}
|
||||||
|
|
||||||
|
print_fail() {
|
||||||
|
echo -e "${RED}✗ FAIL: $1${NC}"
|
||||||
|
TESTS_FAILED=$((TESTS_FAILED + 1))
|
||||||
|
}
|
||||||
|
|
||||||
|
# Send SQL query command
|
||||||
|
send_sql_query() {
|
||||||
|
local query="$1"
|
||||||
|
# Implementation using nostr CLI tools or curl
|
||||||
|
# Returns response JSON
|
||||||
|
}
|
||||||
|
|
||||||
|
# Test functions
|
||||||
|
test_valid_select() {
|
||||||
|
print_test "Valid SELECT query"
|
||||||
|
local response=$(send_sql_query "SELECT * FROM events LIMIT 1")
|
||||||
|
if echo "$response" | grep -q '"query_type":"sql_query"'; then
|
||||||
|
print_pass "Valid SELECT accepted"
|
||||||
|
else
|
||||||
|
print_fail "Valid SELECT rejected"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
test_blocked_insert() {
|
||||||
|
print_test "INSERT statement blocked"
|
||||||
|
local response=$(send_sql_query "INSERT INTO events VALUES (...)")
|
||||||
|
if echo "$response" | grep -q '"error"'; then
|
||||||
|
print_pass "INSERT correctly blocked"
|
||||||
|
else
|
||||||
|
print_fail "INSERT not blocked"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# ... more test functions ...
|
||||||
|
|
||||||
|
# Main test execution
|
||||||
|
main() {
|
||||||
|
echo "================================"
|
||||||
|
echo "SQL Query Admin API Tests"
|
||||||
|
echo "================================"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Check prerequisites
|
||||||
|
if [ -z "$ADMIN_PRIVKEY" ]; then
|
||||||
|
echo "Error: ADMIN_PRIVKEY not set"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Run test suites
|
||||||
|
echo "1. Query Validation Tests"
|
||||||
|
test_valid_select
|
||||||
|
test_blocked_insert
|
||||||
|
test_blocked_update
|
||||||
|
test_blocked_delete
|
||||||
|
test_blocked_drop
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "2. Query Execution Tests"
|
||||||
|
test_simple_select
|
||||||
|
test_select_with_where
|
||||||
|
test_select_with_join
|
||||||
|
test_select_views
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "3. Response Format Tests"
|
||||||
|
test_response_format
|
||||||
|
test_request_id_correlation
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "4. Error Handling Tests"
|
||||||
|
test_invalid_syntax
|
||||||
|
test_nonexistent_table
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "5. Security Tests"
|
||||||
|
test_sql_injection
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "6. Concurrent Query Tests"
|
||||||
|
test_concurrent_queries
|
||||||
|
|
||||||
|
# Print summary
|
||||||
|
echo ""
|
||||||
|
echo "================================"
|
||||||
|
echo "Test Summary"
|
||||||
|
echo "================================"
|
||||||
|
echo "Tests Run: $TESTS_RUN"
|
||||||
|
echo "Tests Passed: $TESTS_PASSED"
|
||||||
|
echo "Tests Failed: $TESTS_FAILED"
|
||||||
|
|
||||||
|
if [ $TESTS_FAILED -eq 0 ]; then
|
||||||
|
echo -e "${GREEN}All tests passed!${NC}"
|
||||||
|
exit 0
|
||||||
|
else
|
||||||
|
echo -e "${RED}Some tests failed${NC}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
main "$@"
|
||||||
|
```
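The elided test functions follow the same pattern as the two shown above; for example (a sketch relying on the `send_sql_query` helper):

```bash
test_blocked_update() {
    print_test "UPDATE statement blocked"
    local response=$(send_sql_query "UPDATE config SET value = 'x' WHERE key = 'relay_name'")
    if echo "$response" | grep -q '"error"'; then
        print_pass "UPDATE correctly blocked"
    else
        print_fail "UPDATE not blocked"
    fi
}

test_nonexistent_table() {
    print_test "Query against non-existent table"
    local response=$(send_sql_query "SELECT * FROM no_such_table")
    if echo "$response" | grep -q '"error"'; then
        print_pass "Invalid table correctly reported as error"
    else
        print_fail "Invalid table not reported"
    fi
}
```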
|
||||||
|
|
||||||
|
## Test Data Setup
|
||||||
|
|
||||||
|
The script should work with the existing relay database without requiring special test data, using:
|
||||||
|
- Existing events table
|
||||||
|
- Existing views (event_stats, recent_events, etc.)
|
||||||
|
- Existing config table
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Set environment variables
|
||||||
|
export ADMIN_PRIVKEY="your_admin_private_key_hex"
|
||||||
|
export RELAY_PUBKEY="relay_public_key_hex"
|
||||||
|
export RELAY_URL="ws://localhost:8888"
|
||||||
|
|
||||||
|
# Run tests
|
||||||
|
./tests/sql_test.sh
|
||||||
|
|
||||||
|
# Run specific test category
|
||||||
|
./tests/sql_test.sh validation
|
||||||
|
./tests/sql_test.sh security
|
||||||
|
```
|
||||||
|
|
||||||
|
## Integration with CI/CD
|
||||||
|
|
||||||
|
The script should:
|
||||||
|
- Return exit code 0 on success, 1 on failure
|
||||||
|
- Output TAP (Test Anything Protocol) format for CI integration
|
||||||
|
- Be runnable in automated test pipelines
|
||||||
|
- Not require manual intervention
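For reference, TAP-formatted output of the same run would look roughly like this (illustrative only):

```
1..24
ok 1 - Valid SELECT accepted
ok 2 - INSERT correctly blocked
ok 3 - UPDATE correctly blocked
ok 4 - DELETE correctly blocked
...
ok 24 - Concurrent queries correlated correctly
```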
|
||||||
|
|
||||||
|
## Dependencies
|
||||||
|
|
||||||
|
- `bash` (version 4+)
|
||||||
|
- `curl` or `websocat` for WebSocket communication
|
||||||
|
- `jq` for JSON parsing
|
||||||
|
- Nostr CLI tools (optional, for event signing)
|
||||||
|
- Running c-relay instance
|
||||||
|
|

## Example Output

```
================================
SQL Query Admin API Tests
================================

1. Query Validation Tests
TEST: Valid SELECT query
✓ PASS: Valid SELECT accepted
TEST: INSERT statement blocked
✓ PASS: INSERT correctly blocked
TEST: UPDATE statement blocked
✓ PASS: UPDATE correctly blocked

2. Query Execution Tests
TEST: Simple SELECT query
✓ PASS: Query executed successfully
TEST: SELECT with WHERE clause
✓ PASS: WHERE clause works correctly

...

================================
Test Summary
================================
Tests Run: 24
Tests Passed: 24
Tests Failed: 0
All tests passed!
```
200
docs/websocket_write_queue_design.md
Normal file
@@ -0,0 +1,200 @@
# WebSocket Write Queue Design

## Problem Statement

The current partial write handling implementation uses a single buffer per session, which fails when multiple events need to be sent to the same client in rapid succession. This causes:

1. First event gets partial write → queued successfully
2. Second event tries to write → **FAILS** with "write already pending"
3. Subsequent events fail similarly, causing data loss

### Server Log Evidence
```
[WARN] WS_FRAME_PARTIAL: EVENT partial write, sub=1 sent=3210 expected=5333
[TRACE] Queued partial write: len=2123
[WARN] WS_FRAME_PARTIAL: EVENT partial write, sub=1 sent=3210 expected=5333
[WARN] queue_websocket_write: write already pending, cannot queue new write
[ERROR] Failed to queue partial EVENT write for sub=1
```

## Root Cause

WebSocket frames must be sent **atomically** - you cannot interleave multiple frames. The current single-buffer approach correctly enforces this, but it rejects new writes instead of queuing them.

## Solution: Write Queue Architecture

### Design Principles

1. **Frame Atomicity**: Complete one WebSocket frame before starting the next
2. **Sequential Processing**: Process queued writes in FIFO order
3. **Memory Safety**: Proper cleanup on connection close or errors
4. **Thread Safety**: Protect queue operations with existing session lock

### Data Structures

#### Write Queue Node
```c
struct write_queue_node {
    unsigned char* buffer;              // Buffer with LWS_PRE space
    size_t total_len;                   // Total length of data to write
    size_t offset;                      // How much has been written so far
    int write_type;                     // LWS_WRITE_TEXT, etc.
    struct write_queue_node* next;      // Next node in queue
};
```

#### Per-Session Write Queue
```c
struct per_session_data {
    // ... existing fields ...

    // Write queue for handling multiple pending writes
    struct write_queue_node* write_queue_head;  // First item to write
    struct write_queue_node* write_queue_tail;  // Last item in queue
    int write_queue_length;                     // Number of items in queue
    int write_in_progress;                      // Flag: 1 if currently writing
};
```

### Algorithm Flow

#### 1. Enqueue Write (`queue_websocket_write`)

```
IF write_queue is empty AND no write in progress:
    - Attempt immediate write with lws_write()
    - IF complete:
        - Return success
    - ELSE (partial write):
        - Create queue node with remaining data
        - Add to queue
        - Set write_in_progress flag
        - Request LWS_CALLBACK_SERVER_WRITEABLE
ELSE:
    - Create queue node with full data
    - Append to queue tail
    - IF no write in progress:
        - Request LWS_CALLBACK_SERVER_WRITEABLE
```
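
A minimal C sketch of the enqueue side under the design above. It assumes the `write_queue_node` and `per_session_data` layouts shown earlier, that `pss->session_lock` is a `pthread_mutex_t`, and that `alloc_write_node()` is a local helper; the immediate-write fast path is omitted for brevity, so every frame is queued and drained from the writable callback.

```c
#include <libwebsockets.h>
#include <pthread.h>
#include <stdlib.h>
#include <string.h>

// Sketch only: allocate a queue node holding one complete frame.
static struct write_queue_node* alloc_write_node(const unsigned char* data,
                                                 size_t len, int write_type) {
    struct write_queue_node* node = calloc(1, sizeof(*node));
    if (!node) return NULL;
    node->buffer = malloc(LWS_PRE + len);          // LWS_PRE headroom required by lws_write()
    if (!node->buffer) { free(node); return NULL; }
    memcpy(node->buffer + LWS_PRE, data, len);
    node->total_len = len;                         // offset starts at 0 via calloc()
    node->write_type = write_type;
    return node;
}

int queue_websocket_write(struct lws* wsi, struct per_session_data* pss,
                          const unsigned char* data, size_t len, int write_type) {
    struct write_queue_node* node = alloc_write_node(data, len, write_type);
    if (!node) return -1;                          // allocation failure: caller logs / sends NOTICE

    pthread_mutex_lock(&pss->session_lock);
    if (pss->write_queue_tail) pss->write_queue_tail->next = node;
    else pss->write_queue_head = node;             // queue was empty
    pss->write_queue_tail = node;
    pss->write_queue_length++;
    pthread_mutex_unlock(&pss->session_lock);

    lws_callback_on_writable(wsi);                 // drain later in LWS_CALLBACK_SERVER_WRITEABLE
    return 0;
}
```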

#### 2. Process Queue (`process_pending_write`)

```
WHILE write_queue is not empty:
    - Get head node
    - Calculate remaining data (total_len - offset)
    - Attempt write with lws_write()

    IF write fails (< 0):
        - Log error
        - Remove and free head node
        - Continue to next node

    ELSE IF partial write (< remaining):
        - Update offset
        - Request LWS_CALLBACK_SERVER_WRITEABLE
        - Break (wait for next callback)

    ELSE (complete write):
        - Remove and free head node
        - Continue to next node

IF queue is empty:
    - Clear write_in_progress flag
```
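
A corresponding sketch of the drain loop, run from `LWS_CALLBACK_SERVER_WRITEABLE`, under the same assumptions as the enqueue sketch; `dequeue_head()` is a hypothetical helper, not an existing relay function.

```c
// Sketch: unlink and free the head node (caller holds pss->session_lock).
static void dequeue_head(struct per_session_data* pss) {
    struct write_queue_node* node = pss->write_queue_head;
    pss->write_queue_head = node->next;
    if (!pss->write_queue_head) pss->write_queue_tail = NULL;
    pss->write_queue_length--;
    free(node->buffer);
    free(node);
}

void process_pending_write(struct lws* wsi, struct per_session_data* pss) {
    pthread_mutex_lock(&pss->session_lock);
    while (pss->write_queue_head) {
        struct write_queue_node* node = pss->write_queue_head;
        size_t remaining = node->total_len - node->offset;
        int n = lws_write(wsi, node->buffer + LWS_PRE + node->offset,
                          remaining, (enum lws_write_protocol)node->write_type);
        if (n < 0) {                    // hard error: drop this frame, try the next one
            dequeue_head(pss);
            continue;
        }
        if ((size_t)n < remaining) {    // partial write: remember progress, wait for next callback
            node->offset += (size_t)n;
            lws_callback_on_writable(wsi);
            break;
        }
        dequeue_head(pss);              // complete frame sent
    }
    pss->write_in_progress = (pss->write_queue_head != NULL);
    pthread_mutex_unlock(&pss->session_lock);
}
```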

#### 3. Cleanup (`LWS_CALLBACK_CLOSED`)

```
WHILE write_queue is not empty:
    - Get head node
    - Free buffer
    - Free node
    - Move to next
Clear queue pointers
```
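
And the matching teardown for `LWS_CALLBACK_CLOSED`, again a sketch using the assumed field names from the structures above:

```c
// Sketch: release every queued frame when the session closes.
void free_write_queue(struct per_session_data* pss) {
    struct write_queue_node* node = pss->write_queue_head;
    while (node) {
        struct write_queue_node* next = node->next;
        free(node->buffer);
        free(node);
        node = next;
    }
    pss->write_queue_head = NULL;
    pss->write_queue_tail = NULL;
    pss->write_queue_length = 0;
    pss->write_in_progress = 0;
}
```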

### Memory Management

1. **Allocation**: Each queue node allocates buffer with `LWS_PRE + data_len`
2. **Ownership**: Queue owns all buffers until write completes or connection closes
3. **Deallocation**: Free buffer and node when:
   - Write completes successfully
   - Write fails with error
   - Connection closes

### Thread Safety

- Use existing `pss->session_lock` to protect queue operations
- Lock during:
  - Enqueue operations
  - Dequeue operations
  - Queue traversal for cleanup

### Performance Considerations

1. **Queue Length Limit**: Implement max queue length (e.g., 100 items) to prevent memory exhaustion (sketched below)
2. **Memory Pressure**: Monitor total queued bytes per session
3. **Backpressure**: If queue exceeds limit, close connection with NOTICE
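
One possible shape for that limit check, run before enqueueing; `MAX_WRITE_QUEUE_LEN` and `send_notice_and_close()` are illustrative names, not existing relay functions:

```c
// Sketch: refuse to queue more frames for a client that cannot keep up.
#define MAX_WRITE_QUEUE_LEN 100

static int check_write_backpressure(struct lws* wsi, struct per_session_data* pss) {
    int overflow;
    pthread_mutex_lock(&pss->session_lock);
    overflow = (pss->write_queue_length >= MAX_WRITE_QUEUE_LEN);
    pthread_mutex_unlock(&pss->session_lock);
    if (overflow) {
        send_notice_and_close(wsi, "slow client: write queue limit exceeded");
        return -1;                      // caller drops the frame instead of queueing it
    }
    return 0;
}
```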

### Error Handling

1. **Allocation Failure**: Return error, log, send NOTICE to client
2. **Write Failure**: Remove failed frame, continue with next
3. **Queue Overflow**: Close connection with appropriate NOTICE

## Implementation Plan

### Phase 1: Data Structure Changes

1. Add `write_queue_node` structure to `websockets.h`
2. Update `per_session_data` with queue fields
3. Remove old single-buffer fields

### Phase 2: Queue Operations

1. Implement `enqueue_write()` helper
2. Implement `dequeue_write()` helper
3. Update `queue_websocket_write()` to use queue
4. Update `process_pending_write()` to process queue

### Phase 3: Integration

1. Update all `lws_write()` call sites
2. Update `LWS_CALLBACK_CLOSED` cleanup
3. Add queue length monitoring

### Phase 4: Testing

1. Test with rapid multiple events to same client
2. Test with large events (>4KB)
3. Test under load with concurrent connections
4. Verify no "Invalid frame header" errors

## Expected Outcomes

1. **No More Rejections**: All writes queued successfully
2. **Frame Integrity**: Complete frames sent atomically
3. **Memory Safety**: Proper cleanup on all paths
4. **Performance**: Minimal overhead for queue management

## Metrics to Monitor

1. Average queue length per session
2. Maximum queue length observed
3. Queue overflow events (if limit implemented)
4. Write completion rate
5. Partial write frequency

## Alternative Approaches Considered

### 1. Larger Single Buffer
**Rejected**: Doesn't solve the fundamental problem of multiple concurrent writes

### 2. Immediate Write Retry
**Rejected**: Could cause busy-waiting and CPU waste

### 3. Drop Frames on Conflict
**Rejected**: Violates reliability requirements

## References

- libwebsockets documentation on `lws_write()` and `LWS_CALLBACK_SERVER_WRITEABLE`
- WebSocket RFC 6455 on frame structure
- Nostr NIP-01 on relay-to-client communication
364
increment_and_push.sh
Executable file
@@ -0,0 +1,364 @@
#!/bin/bash
set -e

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

print_status() { echo -e "${BLUE}[INFO]${NC} $1"; }
print_success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; }
print_warning() { echo -e "${YELLOW}[WARNING]${NC} $1"; }
print_error() { echo -e "${RED}[ERROR]${NC} $1"; }

# Global variables
COMMIT_MESSAGE=""
RELEASE_MODE=false

show_usage() {
    echo "C-Relay Increment and Push Script"
    echo ""
    echo "Usage:"
    echo "  $0 \"commit message\"      - Default: increment patch, commit & push"
    echo "  $0 -r \"commit message\"   - Release: increment minor, create release"
    echo ""
    echo "Examples:"
    echo "  $0 \"Fixed event validation bug\""
    echo "  $0 --release \"Major release with new features\""
    echo ""
    echo "Default Mode (patch increment):"
    echo "  - Increment patch version (v1.2.3 → v1.2.4)"
    echo "  - Git add, commit with message, and push"
    echo ""
    echo "Release Mode (-r flag):"
    echo "  - Increment minor version, zero patch (v1.2.3 → v1.3.0)"
    echo "  - Git add, commit, push, and create Gitea release"
    echo ""
    echo "Requirements for Release Mode:"
    echo "  - Gitea token in ~/.gitea_token for release uploads"
}

# Parse command line arguments
while [[ $# -gt 0 ]]; do
    case $1 in
        -r|--release)
            RELEASE_MODE=true
            shift
            ;;
        -h|--help)
            show_usage
            exit 0
            ;;
        *)
            # First non-flag argument is the commit message
            if [[ -z "$COMMIT_MESSAGE" ]]; then
                COMMIT_MESSAGE="$1"
            fi
            shift
            ;;
    esac
done

# Validate inputs
if [[ -z "$COMMIT_MESSAGE" ]]; then
    print_error "Commit message is required"
    echo ""
    show_usage
    exit 1
fi

# Check if we're in a git repository
check_git_repo() {
    if ! git rev-parse --git-dir > /dev/null 2>&1; then
        print_error "Not in a git repository"
        exit 1
    fi
}

# Function to get current version and increment appropriately
increment_version() {
    local increment_type="$1"  # "patch" or "minor"

    print_status "Getting current version..."

    # Get the highest version tag (not chronologically latest)
    LATEST_TAG=$(git tag -l 'v*.*.*' | sort -V | tail -n 1 || echo "")
    if [[ -z "$LATEST_TAG" ]]; then
        LATEST_TAG="v0.0.0"
        print_warning "No version tags found, starting from $LATEST_TAG"
    fi

    # Extract version components (remove 'v' prefix)
    VERSION=${LATEST_TAG#v}

    # Parse major.minor.patch using regex
    if [[ $VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
        MAJOR=${BASH_REMATCH[1]}
        MINOR=${BASH_REMATCH[2]}
        PATCH=${BASH_REMATCH[3]}
    else
        print_error "Invalid version format in tag: $LATEST_TAG"
        print_error "Expected format: v0.1.0"
        exit 1
    fi

    # Increment version based on type
    if [[ "$increment_type" == "minor" ]]; then
        # Minor release: increment minor, zero patch
        NEW_MINOR=$((MINOR + 1))
        NEW_PATCH=0
        NEW_VERSION="v${MAJOR}.${NEW_MINOR}.${NEW_PATCH}"
        print_status "Release mode: incrementing minor version"
    else
        # Default: increment patch
        NEW_PATCH=$((PATCH + 1))
        NEW_VERSION="v${MAJOR}.${MINOR}.${NEW_PATCH}"
        print_status "Default mode: incrementing patch version"
    fi

    print_status "Current version: $LATEST_TAG"
    print_status "New version: $NEW_VERSION"

    # Update version in src/main.h
    update_version_in_header "$NEW_VERSION" "$MAJOR" "${NEW_MINOR:-$MINOR}" "${NEW_PATCH:-$PATCH}"

    # Export for use in other functions
    export NEW_VERSION
}

# Function to update version macros in src/main.h
update_version_in_header() {
    local new_version="$1"
    local major="$2"
    local minor="$3"
    local patch="$4"

    print_status "Updating version in src/main.h..."

    # Check if src/main.h exists
    if [[ ! -f "src/main.h" ]]; then
        print_error "src/main.h not found"
        exit 1
    fi

    # Update VERSION macro
    sed -i "s/#define VERSION \".*\"/#define VERSION \"$new_version\"/" src/main.h

    # Update VERSION_MAJOR macro
    sed -i "s/#define VERSION_MAJOR [0-9]\+/#define VERSION_MAJOR $major/" src/main.h

    # Update VERSION_MINOR macro
    sed -i "s/#define VERSION_MINOR .*/#define VERSION_MINOR $minor/" src/main.h

    # Update VERSION_PATCH macro
    sed -i "s/#define VERSION_PATCH [0-9]\+/#define VERSION_PATCH $patch/" src/main.h

    print_success "Updated version in src/main.h to $new_version"
}

# Function to commit and push changes
git_commit_and_push() {
    print_status "Preparing git commit..."

    # Stage all changes
    if git add . > /dev/null 2>&1; then
        print_success "Staged all changes"
    else
        print_error "Failed to stage changes"
        exit 1
    fi

    # Check if there are changes to commit
    if git diff --staged --quiet; then
        print_warning "No changes to commit"
    else
        # Commit changes
        if git commit -m "$NEW_VERSION - $COMMIT_MESSAGE" > /dev/null 2>&1; then
            print_success "Committed changes"
        else
            print_error "Failed to commit changes"
            exit 1
        fi
    fi

    # Create new git tag
    if git tag "$NEW_VERSION" > /dev/null 2>&1; then
        print_success "Created tag: $NEW_VERSION"
    else
        print_warning "Tag $NEW_VERSION already exists"
    fi

    # Push changes and tags
    print_status "Pushing to remote repository..."
    if git push > /dev/null 2>&1; then
        print_success "Pushed changes"
    else
        print_error "Failed to push changes"
        exit 1
    fi

    # Push only the new tag to avoid conflicts with existing tags
    if git push origin "$NEW_VERSION" > /dev/null 2>&1; then
        print_success "Pushed tag: $NEW_VERSION"
    else
        print_warning "Tag push failed, trying force push..."
        if git push --force origin "$NEW_VERSION" > /dev/null 2>&1; then
            print_success "Force-pushed updated tag: $NEW_VERSION"
        else
            print_error "Failed to push tag: $NEW_VERSION"
            exit 1
        fi
    fi
}

# Function to commit and push changes without creating a tag (tag already created)
git_commit_and_push_no_tag() {
    print_status "Preparing git commit..."

    # Stage all changes
    if git add . > /dev/null 2>&1; then
        print_success "Staged all changes"
    else
        print_error "Failed to stage changes"
        exit 1
    fi

    # Check if there are changes to commit
    if git diff --staged --quiet; then
        print_warning "No changes to commit"
    else
        # Commit changes
        if git commit -m "$NEW_VERSION - $COMMIT_MESSAGE" > /dev/null 2>&1; then
            print_success "Committed changes"
        else
            print_error "Failed to commit changes"
            exit 1
        fi
    fi

    # Push changes and tags
    print_status "Pushing to remote repository..."
    if git push > /dev/null 2>&1; then
        print_success "Pushed changes"
    else
        print_error "Failed to push changes"
        exit 1
    fi

    # Push only the new tag to avoid conflicts with existing tags
    if git push origin "$NEW_VERSION" > /dev/null 2>&1; then
        print_success "Pushed tag: $NEW_VERSION"
    else
        print_warning "Tag push failed, trying force push..."
        if git push --force origin "$NEW_VERSION" > /dev/null 2>&1; then
            print_success "Force-pushed updated tag: $NEW_VERSION"
        else
            print_error "Failed to push tag: $NEW_VERSION"
            exit 1
        fi
    fi
}

# Function to create Gitea release
create_gitea_release() {
    print_status "Creating Gitea release..."

    # Check for Gitea token
    if [[ ! -f "$HOME/.gitea_token" ]]; then
        print_warning "No ~/.gitea_token found. Skipping release creation."
        print_warning "Create ~/.gitea_token with your Gitea access token to enable releases."
        return 0
    fi

    local token=$(cat "$HOME/.gitea_token" | tr -d '\n\r')
    local api_url="https://git.laantungir.net/api/v1/repos/laantungir/c-relay"

    # Create release
    print_status "Creating release $NEW_VERSION..."
    local response=$(curl -s -X POST "$api_url/releases" \
        -H "Authorization: token $token" \
        -H "Content-Type: application/json" \
        -d "{\"tag_name\": \"$NEW_VERSION\", \"name\": \"$NEW_VERSION\", \"body\": \"$COMMIT_MESSAGE\"}")

    if echo "$response" | grep -q '"id"'; then
        print_success "Created release $NEW_VERSION"
        return 0
    elif echo "$response" | grep -q "already exists"; then
        print_warning "Release $NEW_VERSION already exists"
        return 0
    else
        print_error "Failed to create release $NEW_VERSION"
        print_error "Response: $response"

        # Try to check if the release exists anyway
        print_status "Checking if release exists..."
        local check_response=$(curl -s -H "Authorization: token $token" "$api_url/releases/tags/$NEW_VERSION")
        if echo "$check_response" | grep -q '"id"'; then
            print_warning "Release exists but creation response was unexpected"
            return 0
        else
            print_error "Release does not exist and creation failed"
            return 1
        fi
    fi
}

# Main execution
main() {
    print_status "C-Relay Increment and Push Script"

    # Check prerequisites
    check_git_repo

    if [[ "$RELEASE_MODE" == true ]]; then
        print_status "=== RELEASE MODE ==="

        # Increment minor version for releases
        increment_version "minor"

        # Create new git tag BEFORE compilation so version.h picks it up
        if git tag "$NEW_VERSION" > /dev/null 2>&1; then
            print_success "Created tag: $NEW_VERSION"
        else
            print_warning "Tag $NEW_VERSION already exists, removing and recreating..."
            git tag -d "$NEW_VERSION" > /dev/null 2>&1
            git tag "$NEW_VERSION" > /dev/null 2>&1
        fi

        # Commit and push (but skip tag creation since we already did it)
        git_commit_and_push_no_tag

        # Create Gitea release
        if create_gitea_release; then
            print_success "Release $NEW_VERSION completed successfully!"
        else
            print_error "Release creation failed"
        fi

    else
        print_status "=== DEFAULT MODE ==="

        # Increment patch version for regular commits
        increment_version "patch"

        # Create new git tag BEFORE compilation so version.h picks it up
        if git tag "$NEW_VERSION" > /dev/null 2>&1; then
            print_success "Created tag: $NEW_VERSION"
        else
            print_warning "Tag $NEW_VERSION already exists, removing and recreating..."
            git tag -d "$NEW_VERSION" > /dev/null 2>&1
            git tag "$NEW_VERSION" > /dev/null 2>&1
        fi

        # Commit and push (but skip tag creation since we already did it)
        git_commit_and_push_no_tag

        print_success "Increment and push completed successfully!"
        print_status "Version $NEW_VERSION pushed to repository"
    fi
}

# Execute main function
main
@@ -133,6 +133,11 @@ if [ -n "$PORT_OVERRIDE" ]; then
    fi
fi

+# Validate strict port flag (only makes sense with port override)
+if [ "$USE_TEST_KEYS" = true ] && [ -z "$PORT_OVERRIDE" ]; then
+    echo "WARNING: --strict-port is always used with test keys. Consider specifying a custom port with -p."
+fi
+
# Validate debug level if provided
if [ -n "$DEBUG_LEVEL" ]; then
    if ! [[ "$DEBUG_LEVEL" =~ ^[0-5]$ ]]; then
@@ -163,6 +168,8 @@ if [ "$HELP" = true ]; then
    echo "  $0                                # Fresh start with random keys"
    echo "  $0 -a <admin-hex> -r <relay-hex>  # Use custom keys"
    echo "  $0 -a <admin-hex> -p 9000         # Custom admin key on port 9000"
+   echo "  $0 -p 7777 --strict-port          # Fail if port 7777 unavailable (no fallback)"
+   echo "  $0 -p 8080 --strict-port -d=3     # Custom port with strict binding and debug"
    echo "  $0 --debug-level=3                # Start with debug level 3 (info)"
    echo "  $0 -d=5                           # Start with debug level 5 (trace)"
    echo "  $0 --preserve-database            # Preserve existing database and keys"
53
notes.txt
@@ -37,4 +37,55 @@ You're all set up now - just wait for the next crash and then run the coredumpctl
Even simpler: Use this one-liner
# Start relay and immediately attach gdb
cd /usr/local/bin/c_relay
sudo -u c-relay ./c_relay --debug-level=5 & sleep 2 && sudo gdb -p $(pgrep c_relay)
+
+Inside gdb, after attaching:
+
+(gdb) continue
+Or shorter:
+(gdb) c
+
+How to View the Logs
+Check systemd journal:
+# View all c-relay logs
+sudo journalctl -u c-relay
+
+# View recent logs (last 50 lines)
+sudo journalctl -u c-relay -n 50
+
+# Follow logs in real-time
+sudo journalctl -u c-relay -f
+
+# View logs since last boot
+sudo journalctl -u c-relay -b
+
+Check if service is running:
+
+To immediately trim the syslog file size:
+
+Safe Syslog Truncation
+Stop syslog service first:
+sudo systemctl stop rsyslog
+
+Truncate the syslog file:
+sudo truncate -s 0 /var/log/syslog
+
+Restart syslog service:
+sudo systemctl start rsyslog
+sudo systemctl status rsyslog
+
+sudo -u c-relay ./c_relay --debug-level=5 -r 85d0b37e2ae822966dcadd06b2dc9368cde73865f90ea4d44f8b57d47ef0820a -a 1ec454734dcbf6fe54901ce25c0c7c6bca5edd89443416761fadc321d38df139
+
+./c_relay_static_x86_64 -p 7889 --debug-level=5 -r 85d0b37e2ae822966dcadd06b2dc9368cde73865f90ea4d44f8b57d47ef0820a -a 1ec454734dcbf6fe54901ce25c0c7c6bca5edd89443416761fadc321d38df139
+
+sudo ufw allow 8888/tcp
+sudo ufw delete allow 8888/tcp
+
+lsof -i :7777
+kill $(lsof -t -i :7777)
+kill -9 $(lsof -t -i :7777)
46
src/api.h
@@ -1,8 +1,9 @@
-// API module for serving embedded web content
+// API module for serving embedded web content and admin API functions
#ifndef API_H
#define API_H

#include <libwebsockets.h>
+#include <cjson/cJSON.h>

// Embedded file session data structure for managing buffer lifetime
struct embedded_file_session_data {
@@ -14,10 +15,53 @@ struct embedded_file_session_data {
    int body_sent;
};

+// Configuration change pending structure
+typedef struct pending_config_change {
+    char admin_pubkey[65];                 // Who requested the change
+    char config_key[128];                  // What config to change
+    char old_value[256];                   // Current value
+    char new_value[256];                   // Requested new value
+    time_t timestamp;                      // When requested
+    char change_id[33];                    // Unique ID for this change (first 32 chars of hash)
+    struct pending_config_change* next;    // Linked list for concurrent changes
+} pending_config_change_t;
+
// Handle HTTP request for embedded API files
int handle_embedded_file_request(struct lws* wsi, const char* requested_uri);

// Generate stats JSON from database queries
char* generate_stats_json(void);

+// Generate human-readable stats text
+char* generate_stats_text(void);
+
+// Generate config text from database
+char* generate_config_text(void);
+
+// Send admin response with request ID correlation
+int send_admin_response(const char* sender_pubkey, const char* response_content, const char* request_id,
+                        char* error_message, size_t error_size, struct lws* wsi);
+
+// Configuration change system functions
+int parse_config_command(const char* message, char* key, char* value);
+int validate_config_change(const char* key, const char* value);
+char* store_pending_config_change(const char* admin_pubkey, const char* key,
+                                  const char* old_value, const char* new_value);
+pending_config_change_t* find_pending_change(const char* admin_pubkey, const char* change_id);
+int apply_config_change(const char* key, const char* value);
+void cleanup_expired_pending_changes(void);
+int handle_config_confirmation(const char* admin_pubkey, const char* response);
+char* generate_config_change_confirmation(const char* key, const char* old_value, const char* new_value);
+int process_config_change_request(const char* admin_pubkey, const char* message);
+
+// SQL query functions
+int validate_sql_query(const char* query, char* error_message, size_t error_size);
+char* execute_sql_query(const char* query, const char* request_id, char* error_message, size_t error_size);
+int handle_sql_query_unified(cJSON* event, const char* query, char* error_message, size_t error_size, struct lws* wsi);
+
+// Monitoring system functions
+void monitoring_on_event_stored(void);
+void monitoring_on_subscription_change(void);
+int get_monitoring_throttle_seconds(void);
+
#endif // API_H
87
src/config.c
@@ -3,6 +3,19 @@
#include "debug.h"
#include "default_config_event.h"
#include "dm_admin.h"
+
+// Undefine VERSION macros before including nostr_core.h to avoid redefinition warnings
+// This must come AFTER default_config_event.h so that RELAY_VERSION macro expansion works correctly
+#ifdef VERSION
+#undef VERSION
+#endif
+#ifdef VERSION_MINOR
+#undef VERSION_MINOR
+#endif
+#ifdef VERSION_PATCH
+#undef VERSION_PATCH
+#endif
+
#include "../nostr_core_lib/nostr_core/nostr_core.h"
#include <stdio.h>
#include <stdlib.h>
@@ -64,7 +77,7 @@ int process_admin_config_event(cJSON* event, char* error_message, size_t error_s
// Forward declaration for relay info initialization
void init_relay_info(void);
int add_auth_rule_from_config(const char* rule_type, const char* pattern_type,
-                              const char* pattern_value, const char* action);
+                              const char* pattern_value);
int remove_auth_rule_from_config(const char* rule_type, const char* pattern_type,
                                 const char* pattern_value);
int is_config_table_ready(void);
@@ -824,26 +837,7 @@ int startup_existing_relay(const char* relay_pubkey, const cli_options_t* cli_op

    // NOTE: Database is already initialized in main.c before calling this function
    // Config table should already exist with complete configuration
+   // CLI overrides will be applied after this function returns in main.c
-   // Check if CLI overrides need to be applied
-   int has_overrides = 0;
-   if (cli_options) {
-       if (cli_options->port_override > 0) has_overrides = 1;
-       if (cli_options->admin_pubkey_override[0] != '\0') has_overrides = 1;
-       if (cli_options->relay_privkey_override[0] != '\0') has_overrides = 1;
-   }
-
-   if (has_overrides) {
-       // Apply CLI overrides to existing database
-       DEBUG_INFO("Applying CLI overrides to existing database");
-       if (apply_cli_overrides_atomic(cli_options) != 0) {
-           DEBUG_ERROR("Failed to apply CLI overrides to existing database");
-           return -1;
-       }
-   } else {
-       // No CLI overrides - config table is already available
-       DEBUG_INFO("No CLI overrides - config table is already available");
-   }
-
    return 0;
}
@@ -2067,28 +2061,27 @@ int process_admin_auth_event(cJSON* event, char* error_message, size_t error_siz

// Add auth rule from configuration
int add_auth_rule_from_config(const char* rule_type, const char* pattern_type,
-                              const char* pattern_value, const char* action) {
+                              const char* pattern_value) {
-   if (!g_db || !rule_type || !pattern_type || !pattern_value || !action) {
+   if (!g_db || !rule_type || !pattern_type || !pattern_value) {
        return -1;
    }

-   const char* sql = "INSERT INTO auth_rules (rule_type, pattern_type, pattern_value, action) "
+   const char* sql = "INSERT INTO auth_rules (rule_type, pattern_type, pattern_value) "
-                     "VALUES (?, ?, ?, ?)";
+                     "VALUES (?, ?, ?)";

    sqlite3_stmt* stmt;
    int rc = sqlite3_prepare_v2(g_db, sql, -1, &stmt, NULL);
    if (rc != SQLITE_OK) {
        return -1;
    }

    sqlite3_bind_text(stmt, 1, rule_type, -1, SQLITE_STATIC);
    sqlite3_bind_text(stmt, 2, pattern_type, -1, SQLITE_STATIC);
    sqlite3_bind_text(stmt, 3, pattern_value, -1, SQLITE_STATIC);
-   sqlite3_bind_text(stmt, 4, action, -1, SQLITE_STATIC);

    rc = sqlite3_step(stmt);
    sqlite3_finalize(stmt);

    return (rc == SQLITE_DONE) ? 0 : -1;
}

@@ -2725,13 +2718,13 @@ int handle_auth_query_unified(cJSON* event, const char* query_type, char* error_

    // Build appropriate SQL query based on query type
    if (strcmp(query_type, "all") == 0) {
-       sql = "SELECT rule_type, pattern_type, pattern_value, action FROM auth_rules ORDER BY rule_type, pattern_type";
+       sql = "SELECT rule_type, pattern_type, pattern_value FROM auth_rules WHERE active = 1 ORDER BY rule_type, pattern_type";
    }
    else if (strcmp(query_type, "whitelist") == 0) {
-       sql = "SELECT rule_type, pattern_type, pattern_value, action FROM auth_rules WHERE rule_type LIKE '%whitelist%' ORDER BY pattern_type";
+       sql = "SELECT rule_type, pattern_type, pattern_value FROM auth_rules WHERE rule_type LIKE '%whitelist%' AND active = 1 ORDER BY pattern_type";
    }
    else if (strcmp(query_type, "blacklist") == 0) {
-       sql = "SELECT rule_type, pattern_type, pattern_value, action FROM auth_rules WHERE rule_type LIKE '%blacklist%' ORDER BY pattern_type";
+       sql = "SELECT rule_type, pattern_type, pattern_value FROM auth_rules WHERE rule_type LIKE '%blacklist%' AND active = 1 ORDER BY pattern_type";
    }
    else if (strcmp(query_type, "pattern") == 0) {
        // Get pattern value from tags
@@ -2740,7 +2733,7 @@ int handle_auth_query_unified(cJSON* event, const char* query_type, char* error_
            snprintf(error_message, error_size, "invalid: pattern query requires pattern value");
            return -1;
        }
-       sql = "SELECT rule_type, pattern_type, pattern_value, action FROM auth_rules WHERE pattern_value = ? ORDER BY rule_type, pattern_type";
+       sql = "SELECT rule_type, pattern_type, pattern_value FROM auth_rules WHERE pattern_value = ? AND active = 1 ORDER BY rule_type, pattern_type";
        use_pattern_param = 1;
    }
    else {
@@ -2775,7 +2768,6 @@ int handle_auth_query_unified(cJSON* event, const char* query_type, char* error_
        const char* rule_type = (const char*)sqlite3_column_text(stmt, 0);
        const char* pattern_type = (const char*)sqlite3_column_text(stmt, 1);
        const char* pattern_value_result = (const char*)sqlite3_column_text(stmt, 2);
-       const char* action = (const char*)sqlite3_column_text(stmt, 3);

        // printf("  %s %s:%s -> %s\n",
        //        rule_type ? rule_type : "",
@@ -2788,7 +2780,7 @@ int handle_auth_query_unified(cJSON* event, const char* query_type, char* error_
        cJSON_AddStringToObject(rule_obj, "rule_type", rule_type ? rule_type : "");
        cJSON_AddStringToObject(rule_obj, "pattern_type", pattern_type ? pattern_type : "");
        cJSON_AddStringToObject(rule_obj, "pattern_value", pattern_value_result ? pattern_value_result : "");
-       cJSON_AddStringToObject(rule_obj, "action", action ? action : "allow");
+       cJSON_AddStringToObject(rule_obj, "action", "allow"); // Simplified: rule_type determines behavior
        cJSON_AddItemToArray(results_array, rule_obj);

        rule_count++;
@@ -3314,7 +3306,7 @@ int handle_auth_rule_modification_unified(cJSON* event, char* error_message, siz

    // Process auth rule: ["blacklist"|"whitelist", "pubkey"|"hash", "value"]
    if (strcmp(rule_type, "blacklist") == 0 || strcmp(rule_type, "whitelist") == 0) {
-       if (add_auth_rule_from_config(rule_type, pattern_type, pattern_value, "allow") == 0) {
+       if (add_auth_rule_from_config(rule_type, pattern_type, pattern_value) == 0) {
            rules_processed++;

            // Add processed rule to response array
@@ -3322,7 +3314,7 @@ int handle_auth_rule_modification_unified(cJSON* event, char* error_message, siz
            cJSON_AddStringToObject(rule_obj, "rule_type", rule_type);
            cJSON_AddStringToObject(rule_obj, "pattern_type", pattern_type);
            cJSON_AddStringToObject(rule_obj, "pattern_value", pattern_value);
-           cJSON_AddStringToObject(rule_obj, "action", "allow");
+           cJSON_AddStringToObject(rule_obj, "action", "allow"); // Simplified: rule_type determines behavior
            cJSON_AddStringToObject(rule_obj, "status", "added");
            cJSON_AddItemToArray(processed_rules, rule_obj);
        }
@@ -4101,6 +4093,23 @@ int populate_all_config_values_atomic(const char* admin_pubkey, const char* rela
        return -1;
    }
+
+   // Insert monitoring system config entry (ephemeral kind 24567)
+   // Note: Monitoring is automatically activated when clients subscribe to kind 24567
+   sqlite3_reset(stmt);
+   sqlite3_bind_text(stmt, 1, "kind_24567_reporting_throttle_sec", -1, SQLITE_STATIC);
+   sqlite3_bind_text(stmt, 2, "5", -1, SQLITE_STATIC); // integer, default 5 seconds
+   sqlite3_bind_text(stmt, 3, "integer", -1, SQLITE_STATIC);
+   sqlite3_bind_text(stmt, 4, "Minimum seconds between monitoring event reports (ephemeral kind 24567)", -1, SQLITE_STATIC);
+   sqlite3_bind_text(stmt, 5, "monitoring", -1, SQLITE_STATIC);
+   sqlite3_bind_int(stmt, 6, 0); // does not require restart
+   rc = sqlite3_step(stmt);
+   if (rc != SQLITE_DONE) {
+       DEBUG_ERROR("Failed to insert kind_24567_reporting_throttle_sec: %s", sqlite3_errmsg(g_db));
+       sqlite3_finalize(stmt);
+       sqlite3_exec(g_db, "ROLLBACK;", NULL, NULL, NULL);
+       return -1;
+   }
+
    sqlite3_finalize(stmt);

    // Commit transaction
@@ -114,7 +114,7 @@ cJSON* build_query_response(const char* query_type, cJSON* results_array, int to

// Auth rules management functions
int add_auth_rule_from_config(const char* rule_type, const char* pattern_type,
-                              const char* pattern_value, const char* action);
+                              const char* pattern_value);
int remove_auth_rule_from_config(const char* rule_type, const char* pattern_type,
                                 const char* pattern_value);
51
src/debug.c
@@ -1,51 +0,0 @@
#include "debug.h"
#include <stdarg.h>
#include <string.h>

// Global debug level (default: no debug output)
debug_level_t g_debug_level = DEBUG_LEVEL_NONE;

void debug_init(int level) {
    if (level < 0) level = 0;
    if (level > 5) level = 5;
    g_debug_level = (debug_level_t)level;
}

void debug_log(debug_level_t level, const char* file, int line, const char* format, ...) {
    // Get timestamp
    time_t now = time(NULL);
    struct tm* tm_info = localtime(&now);
    char timestamp[32];
    strftime(timestamp, sizeof(timestamp), "%Y-%m-%d %H:%M:%S", tm_info);

    // Get level string
    const char* level_str = "UNKNOWN";
    switch (level) {
        case DEBUG_LEVEL_ERROR: level_str = "ERROR"; break;
        case DEBUG_LEVEL_WARN:  level_str = "WARN "; break;
        case DEBUG_LEVEL_INFO:  level_str = "INFO "; break;
        case DEBUG_LEVEL_DEBUG: level_str = "DEBUG"; break;
        case DEBUG_LEVEL_TRACE: level_str = "TRACE"; break;
        default: break;
    }

    // Print prefix with timestamp and level
    printf("[%s] [%s] ", timestamp, level_str);

    // Print source location when debug level is TRACE (5) or higher
    if (file && g_debug_level >= DEBUG_LEVEL_TRACE) {
        // Extract just the filename (not full path)
        const char* filename = strrchr(file, '/');
        filename = filename ? filename + 1 : file;
        printf("[%s:%d] ", filename, line);
    }

    // Print message
    va_list args;
    va_start(args, format);
    vprintf(format, args);
    va_end(args);

    printf("\n");
    fflush(stdout);
}
43
src/debug.h
@@ -1,43 +0,0 @@
#ifndef DEBUG_H
#define DEBUG_H

#include <stdio.h>
#include <time.h>

// Debug levels
typedef enum {
    DEBUG_LEVEL_NONE = 0,
    DEBUG_LEVEL_ERROR = 1,
    DEBUG_LEVEL_WARN = 2,
    DEBUG_LEVEL_INFO = 3,
    DEBUG_LEVEL_DEBUG = 4,
    DEBUG_LEVEL_TRACE = 5
} debug_level_t;

// Global debug level (set at runtime via CLI)
extern debug_level_t g_debug_level;

// Initialize debug system
void debug_init(int level);

// Core logging function
void debug_log(debug_level_t level, const char* file, int line, const char* format, ...);

// Convenience macros that check level before calling
// Note: TRACE level (5) and above include file:line information for ALL messages
#define DEBUG_ERROR(...) \
    do { if (g_debug_level >= DEBUG_LEVEL_ERROR) debug_log(DEBUG_LEVEL_ERROR, __FILE__, __LINE__, __VA_ARGS__); } while(0)

#define DEBUG_WARN(...) \
    do { if (g_debug_level >= DEBUG_LEVEL_WARN) debug_log(DEBUG_LEVEL_WARN, __FILE__, __LINE__, __VA_ARGS__); } while(0)

#define DEBUG_INFO(...) \
    do { if (g_debug_level >= DEBUG_LEVEL_INFO) debug_log(DEBUG_LEVEL_INFO, __FILE__, __LINE__, __VA_ARGS__); } while(0)

#define DEBUG_LOG(...) \
    do { if (g_debug_level >= DEBUG_LEVEL_DEBUG) debug_log(DEBUG_LEVEL_DEBUG, __FILE__, __LINE__, __VA_ARGS__); } while(0)

#define DEBUG_TRACE(...) \
    do { if (g_debug_level >= DEBUG_LEVEL_TRACE) debug_log(DEBUG_LEVEL_TRACE, __FILE__, __LINE__, __VA_ARGS__); } while(0)

#endif /* DEBUG_H */
@@ -72,7 +72,13 @@ static const struct {

    // Performance Settings
    {"default_limit", "500"},
-   {"max_limit", "5000"}
+   {"max_limit", "5000"},
+
+   // Proxy Settings
+   // Trust proxy headers (X-Forwarded-For, X-Real-IP) for accurate client IP detection
+   // Safe for informational/debugging use. Only becomes a security concern if you implement
+   // IP-based rate limiting or access control (which would require firewall protection anyway)
+   {"trust_proxy_headers", "true"}
};

// Number of default configuration values
1154
src/dm_admin.c
File diff suppressed because it is too large
@@ -1,3 +1,5 @@
+// Note to assistants. dm_admin is only for functions relating to direct messaging
+
#ifndef DM_ADMIN_H
#define DM_ADMIN_H

@@ -24,4 +26,11 @@ int send_nip17_response(const char* sender_pubkey, const char* response_content,
char* generate_config_text(void);
char* generate_stats_text(void);
+
+// SQL query admin functions
+int validate_sql_query(const char* query, char* error_message, size_t error_size);
+char* execute_sql_query(const char* query, const char* request_id, char* error_message, size_t error_size);
+int handle_sql_query_unified(cJSON* event, const char* query, char* error_message, size_t error_size, struct lws* wsi);
+int send_admin_response(const char* sender_pubkey, const char* response_content, const char* request_id,
+                        char* error_message, size_t error_size, struct lws* wsi);
+
#endif // DM_ADMIN_H
File diff suppressed because one or more lines are too long
78
src/main.c
@@ -149,6 +149,9 @@ int mark_event_as_deleted(const char* event_id, const char* deletion_event_id, c
// Forward declaration for database functions
int store_event(cJSON* event);

+// Forward declaration for monitoring system
+void monitoring_on_event_stored(void);
+
// Forward declarations for NIP-11 relay information handling
void init_relay_info();
void cleanup_relay_info();
@@ -312,14 +315,35 @@ int init_database(const char* database_path_override) {
    if (g_debug_level >= DEBUG_LEVEL_DEBUG) {
        // Check config table row count immediately after database open
        sqlite3_stmt* stmt;
-       if (sqlite3_prepare_v2(g_db, "SELECT COUNT(*) FROM config", -1, &stmt, NULL) == SQLITE_OK) {
+       int rc = sqlite3_prepare_v2(g_db, "SELECT COUNT(*) FROM config", -1, &stmt, NULL);
+       if (rc == SQLITE_OK) {
            if (sqlite3_step(stmt) == SQLITE_ROW) {
                int row_count = sqlite3_column_int(stmt, 0);
                DEBUG_LOG("Config table row count immediately after sqlite3_open(): %d", row_count);
            }
            sqlite3_finalize(stmt);
        } else {
-           DEBUG_LOG("Config table does not exist yet (first-time startup)");
+           // Capture and log the actual SQLite error instead of assuming table doesn't exist
+           const char* err_msg = sqlite3_errmsg(g_db);
+           DEBUG_LOG("Failed to prepare config table query: %s (error code: %d)", err_msg, rc);
+
+           // Check if it's actually a missing table vs other error
+           if (rc == SQLITE_ERROR) {
+               // Try to check if config table exists
+               sqlite3_stmt* check_stmt;
+               int check_rc = sqlite3_prepare_v2(g_db, "SELECT name FROM sqlite_master WHERE type='table' AND name='config'", -1, &check_stmt, NULL);
+               if (check_rc == SQLITE_OK) {
+                   int has_table = (sqlite3_step(check_stmt) == SQLITE_ROW);
+                   sqlite3_finalize(check_stmt);
+                   if (has_table) {
+                       DEBUG_LOG("Config table EXISTS but query failed - possible database corruption or locking issue");
+                   } else {
+                       DEBUG_LOG("Config table does not exist yet (first-time startup)");
+                   }
+               } else {
+                   DEBUG_LOG("Failed to check table existence: %s (error code: %d)", sqlite3_errmsg(g_db), check_rc);
+               }
+           }
        }
    }
    // DEBUG_GUARD_END
@@ -729,8 +753,12 @@ int store_event(cJSON* event) {
        free(tags_json);
        return -1;
    }

    free(tags_json);
+
+   // Call monitoring hook after successful event storage
+   monitoring_on_event_stored();
+
    return 0;
}

@@ -1428,7 +1456,7 @@ void print_usage(const char* program_name) {
    printf("Options:\n");
    printf("  -h, --help               Show this help message\n");
    printf("  -v, --version            Show version information\n");
-   printf("  -p, --port PORT          Override relay port (first-time startup only)\n");
+   printf("  -p, --port PORT          Override relay port (first-time startup and existing relay restarts)\n");
    printf("  --strict-port            Fail if exact port is unavailable (no port increment)\n");
    printf("  -a, --admin-pubkey KEY   Override admin public key (64-char hex or npub)\n");
    printf("  -r, --relay-privkey KEY  Override relay private key (64-char hex or nsec)\n");
@@ -1438,13 +1466,14 @@ void print_usage(const char* program_name) {
    printf("Configuration:\n");
    printf("  This relay uses event-based configuration stored in the database.\n");
    printf("  On first startup, keys are automatically generated and printed once.\n");
-   printf("  Command line options like --port only apply during first-time setup.\n");
+   printf("  Command line options like --port apply during first-time setup and existing relay restarts.\n");
    printf("  After initial setup, all configuration is managed via database events.\n");
    printf("  Database file: <relay_pubkey>.db (created automatically)\n");
    printf("\n");
    printf("Port Binding:\n");
    printf("  Default: Try up to 10 consecutive ports if requested port is busy\n");
    printf("  --strict-port: Fail immediately if exact requested port is unavailable\n");
+   printf("  --strict-port works with any custom port specified via -p or --port\n");
    printf("\n");
    printf("Examples:\n");
    printf("  %s                       # Start relay (auto-configure on first run)\n", program_name);
@@ -1791,7 +1820,7 @@ int main(int argc, char* argv[]) {
        return 1;
    }

-   // Setup existing relay (sets database path and loads config)
+   // Setup existing relay FIRST (sets database path)
    if (startup_existing_relay(relay_pubkey, &cli_options) != 0) {
        DEBUG_ERROR("Failed to setup existing relay");
        cleanup_configuration_system();
@@ -1804,23 +1833,7 @@ int main(int argc, char* argv[]) {
        return 1;
    }

-   // Check config table row count before database initialization
-   {
-       sqlite3* temp_db = NULL;
-       if (sqlite3_open(g_database_path, &temp_db) == SQLITE_OK) {
-           sqlite3_stmt* stmt;
-           if (sqlite3_prepare_v2(temp_db, "SELECT COUNT(*) FROM config", -1, &stmt, NULL) == SQLITE_OK) {
-               if (sqlite3_step(stmt) == SQLITE_ROW) {
-                   int row_count = sqlite3_column_int(stmt, 0);
-                   printf("   Config table row count before database initialization: %d\n", row_count);
-               }
-               sqlite3_finalize(stmt);
-           }
-           sqlite3_close(temp_db);
-       }
-   }
-
-   // Initialize database with existing database path
+   // Initialize database with the database path set by startup_existing_relay()
    DEBUG_TRACE("Initializing existing database");
    if (init_database(g_database_path) != 0) {
        DEBUG_ERROR("Failed to initialize existing database");
@@ -1835,6 +1848,20 @@ int main(int argc, char* argv[]) {
    }
    DEBUG_LOG("Existing database initialized");

+   // Apply CLI overrides atomically (now that database is initialized)
+   if (apply_cli_overrides_atomic(&cli_options) != 0) {
+       DEBUG_ERROR("Failed to apply CLI overrides for existing relay");
+       cleanup_configuration_system();
+       free(relay_pubkey);
+       for (int i = 0; existing_files[i]; i++) {
+           free(existing_files[i]);
+       }
+       free(existing_files);
+       nostr_cleanup();
+       close_database();
+       return 1;
+   }
+
    // DEBUG_GUARD_START
    if (g_debug_level >= DEBUG_LEVEL_DEBUG) {
        sqlite3_stmt* stmt;
@@ -1979,6 +2006,7 @@ int main(int argc, char* argv[]) {

    // Initialize NIP-40 expiration configuration
    init_expiration_config();
+
    // Update subscription manager configuration
    update_subscription_manager_config();

@@ -2002,8 +2030,8 @@ int main(int argc, char* argv[]) {


-   // Start WebSocket Nostr relay server (port from configuration)
-   int result = start_websocket_relay(-1, cli_options.strict_port); // Let config system determine port, pass strict_port flag
+   // Start WebSocket Nostr relay server (port from CLI override or configuration)
+   int result = start_websocket_relay(cli_options.port_override, cli_options.strict_port); // Use CLI port override if specified, otherwise config

    // Cleanup
    cleanup_relay_info();
@@ -10,10 +10,10 @@
 #define MAIN_H
 
 // Version information (auto-updated by build system)
-#define VERSION "v0.4.6"
+#define VERSION "v0.7.38"
 #define VERSION_MAJOR 0
-#define VERSION_MINOR 4
-#define VERSION_PATCH 6
+#define VERSION_MINOR 7
+#define VERSION_PATCH 38
 
 // Relay metadata (authoritative source for NIP-11 information)
 #define RELAY_NAME "C-Relay"
@@ -15,6 +15,7 @@
|
|||||||
#include "../nostr_core_lib/nostr_core/nip013.h" // NIP-13: Proof of Work
|
#include "../nostr_core_lib/nostr_core/nip013.h" // NIP-13: Proof of Work
|
||||||
#include "../nostr_core_lib/nostr_core/nostr_common.h"
|
#include "../nostr_core_lib/nostr_core/nostr_common.h"
|
||||||
#include "../nostr_core_lib/nostr_core/utils.h"
|
#include "../nostr_core_lib/nostr_core/utils.h"
|
||||||
|
#include "debug.h" // C-relay debug system
|
||||||
#include "config.h" // C-relay configuration system
|
#include "config.h" // C-relay configuration system
|
||||||
#include <sqlite3.h>
|
#include <sqlite3.h>
|
||||||
#include <stdio.h>
|
#include <stdio.h>
|
||||||
@@ -531,6 +532,8 @@ int check_database_auth_rules(const char *pubkey, const char *operation __attrib
|
|||||||
sqlite3_stmt *stmt = NULL;
|
sqlite3_stmt *stmt = NULL;
|
||||||
int rc;
|
int rc;
|
||||||
|
|
||||||
|
DEBUG_TRACE("Checking auth rules for pubkey: %s", pubkey);
|
||||||
|
|
||||||
if (!pubkey) {
|
if (!pubkey) {
|
||||||
return NOSTR_ERROR_INVALID_INPUT;
|
return NOSTR_ERROR_INVALID_INPUT;
|
||||||
}
|
}
|
||||||
@@ -547,19 +550,21 @@ int check_database_auth_rules(const char *pubkey, const char *operation __attrib
 
     // Step 1: Check pubkey blacklist (highest priority)
     const char *blacklist_sql =
-        "SELECT rule_type, action FROM auth_rules WHERE rule_type = "
-        "'blacklist' AND pattern_type = 'pubkey' AND pattern_value = ? LIMIT 1";
+        "SELECT rule_type FROM auth_rules WHERE rule_type = "
+        "'blacklist' AND pattern_type = 'pubkey' AND pattern_value = ? AND active = 1 LIMIT 1";
+    DEBUG_TRACE("Blacklist SQL: %s", blacklist_sql);
     rc = sqlite3_prepare_v2(db, blacklist_sql, -1, &stmt, NULL);
     if (rc == SQLITE_OK) {
         sqlite3_bind_text(stmt, 1, pubkey, -1, SQLITE_STATIC);
 
-        if (sqlite3_step(stmt) == SQLITE_ROW) {
-            const char *action = (const char *)sqlite3_column_text(stmt, 1);
+        int step_result = sqlite3_step(stmt);
+        DEBUG_TRACE("Blacklist query result: %s", step_result == SQLITE_ROW ? "FOUND" : "NOT_FOUND");
+
+        if (step_result == SQLITE_ROW) {
+            DEBUG_TRACE("BLACKLIST HIT: Denying access for pubkey: %s", pubkey);
             // Set specific violation details for status code mapping
             strcpy(g_last_rule_violation.violation_type, "pubkey_blacklist");
-            sprintf(g_last_rule_violation.reason, "Public key blacklisted: %s",
-                    action ? action : "PUBKEY_BLACKLIST");
+            sprintf(g_last_rule_violation.reason, "Public key blacklisted");
 
             sqlite3_finalize(stmt);
             sqlite3_close(db);
@@ -571,19 +576,16 @@ int check_database_auth_rules(const char *pubkey, const char *operation __attrib
|
|||||||
// Step 2: Check hash blacklist
|
// Step 2: Check hash blacklist
|
||||||
if (resource_hash) {
|
if (resource_hash) {
|
||||||
const char *hash_blacklist_sql =
|
const char *hash_blacklist_sql =
|
||||||
"SELECT rule_type, action FROM auth_rules WHERE rule_type = "
|
"SELECT rule_type FROM auth_rules WHERE rule_type = "
|
||||||
"'blacklist' AND pattern_type = 'hash' AND pattern_value = ? LIMIT 1";
|
"'blacklist' AND pattern_type = 'hash' AND pattern_value = ? AND active = 1 LIMIT 1";
|
||||||
rc = sqlite3_prepare_v2(db, hash_blacklist_sql, -1, &stmt, NULL);
|
rc = sqlite3_prepare_v2(db, hash_blacklist_sql, -1, &stmt, NULL);
|
||||||
if (rc == SQLITE_OK) {
|
if (rc == SQLITE_OK) {
|
||||||
sqlite3_bind_text(stmt, 1, resource_hash, -1, SQLITE_STATIC);
|
sqlite3_bind_text(stmt, 1, resource_hash, -1, SQLITE_STATIC);
|
||||||
|
|
||||||
if (sqlite3_step(stmt) == SQLITE_ROW) {
|
if (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||||
const char *action = (const char *)sqlite3_column_text(stmt, 1);
|
|
||||||
|
|
||||||
// Set specific violation details for status code mapping
|
// Set specific violation details for status code mapping
|
||||||
strcpy(g_last_rule_violation.violation_type, "hash_blacklist");
|
strcpy(g_last_rule_violation.violation_type, "hash_blacklist");
|
||||||
sprintf(g_last_rule_violation.reason, "File hash blacklisted: %s",
|
sprintf(g_last_rule_violation.reason, "File hash blacklisted");
|
||||||
action ? action : "HASH_BLACKLIST");
|
|
||||||
|
|
||||||
sqlite3_finalize(stmt);
|
sqlite3_finalize(stmt);
|
||||||
sqlite3_close(db);
|
sqlite3_close(db);
|
||||||
@@ -595,8 +597,8 @@ int check_database_auth_rules(const char *pubkey, const char *operation __attrib
|
|||||||
|
|
||||||
// Step 3: Check pubkey whitelist
|
// Step 3: Check pubkey whitelist
|
||||||
const char *whitelist_sql =
|
const char *whitelist_sql =
|
||||||
"SELECT rule_type, action FROM auth_rules WHERE rule_type = "
|
"SELECT rule_type FROM auth_rules WHERE rule_type = "
|
||||||
"'whitelist' AND pattern_type = 'pubkey' AND pattern_value = ? LIMIT 1";
|
"'whitelist' AND pattern_type = 'pubkey' AND pattern_value = ? AND active = 1 LIMIT 1";
|
||||||
rc = sqlite3_prepare_v2(db, whitelist_sql, -1, &stmt, NULL);
|
rc = sqlite3_prepare_v2(db, whitelist_sql, -1, &stmt, NULL);
|
||||||
if (rc == SQLITE_OK) {
|
if (rc == SQLITE_OK) {
|
||||||
sqlite3_bind_text(stmt, 1, pubkey, -1, SQLITE_STATIC);
|
sqlite3_bind_text(stmt, 1, pubkey, -1, SQLITE_STATIC);
|
||||||
@@ -612,7 +614,7 @@ int check_database_auth_rules(const char *pubkey, const char *operation __attrib
|
|||||||
// Step 4: Check if any whitelist rules exist - if yes, deny by default
|
// Step 4: Check if any whitelist rules exist - if yes, deny by default
|
||||||
const char *whitelist_exists_sql =
|
const char *whitelist_exists_sql =
|
||||||
"SELECT COUNT(*) FROM auth_rules WHERE rule_type = 'whitelist' "
|
"SELECT COUNT(*) FROM auth_rules WHERE rule_type = 'whitelist' "
|
||||||
"AND pattern_type = 'pubkey' LIMIT 1";
|
"AND pattern_type = 'pubkey' AND active = 1 LIMIT 1";
|
||||||
rc = sqlite3_prepare_v2(db, whitelist_exists_sql, -1, &stmt, NULL);
|
rc = sqlite3_prepare_v2(db, whitelist_exists_sql, -1, &stmt, NULL);
|
||||||
if (rc == SQLITE_OK) {
|
if (rc == SQLITE_OK) {
|
||||||
if (sqlite3_step(stmt) == SQLITE_ROW) {
|
if (sqlite3_step(stmt) == SQLITE_ROW) {
|
||||||
|
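The checks in this function give the rules a fixed precedence: pubkey blacklist first, then hash blacklist, then pubkey whitelist, and finally a default deny whenever any active whitelist rule exists. A minimal sketch of that decision order, with boolean parameters standing in for the prepared-statement lookups shown above (the function and parameter names are illustrative, not part of the relay):

#include <stdbool.h>

/* Sketch only: mirrors the precedence implemented by check_database_auth_rules(). */
static bool auth_allows(bool pubkey_blacklisted,
                        bool hash_blacklisted,
                        bool pubkey_whitelisted,
                        bool whitelist_rules_exist)
{
    if (pubkey_blacklisted)    return false;  /* step 1: pubkey blacklist wins */
    if (hash_blacklisted)      return false;  /* step 2: hash blacklist */
    if (pubkey_whitelisted)    return true;   /* step 3: explicit whitelist entry */
    if (whitelist_rules_exist) return false;  /* step 4: whitelist mode, default deny */
    return true;                              /* no restrictive rules configured */
}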
|||||||
@@ -1,12 +1,12 @@
 /* Embedded SQL Schema for C Nostr Relay
  * Generated from db/schema.sql - Do not edit manually
- * Schema Version: 7
+ * Schema Version: 8
  */
 #ifndef SQL_SCHEMA_H
 #define SQL_SCHEMA_H
 
 /* Schema version constant */
-#define EMBEDDED_SCHEMA_VERSION "7"
+#define EMBEDDED_SCHEMA_VERSION "8"
 
 /* Embedded SQL schema as C string literal */
 static const char* const EMBEDDED_SCHEMA_SQL =
@@ -15,7 +15,7 @@ static const char* const EMBEDDED_SCHEMA_SQL =
|
|||||||
-- Configuration system using config table\n\
|
-- Configuration system using config table\n\
|
||||||
\n\
|
\n\
|
||||||
-- Schema version tracking\n\
|
-- Schema version tracking\n\
|
||||||
PRAGMA user_version = 7;\n\
|
PRAGMA user_version = 8;\n\
|
||||||
\n\
|
\n\
|
||||||
-- Enable foreign key support\n\
|
-- Enable foreign key support\n\
|
||||||
PRAGMA foreign_keys = ON;\n\
|
PRAGMA foreign_keys = ON;\n\
|
||||||
@@ -58,8 +58,8 @@ CREATE TABLE schema_info (\n\
|
|||||||
\n\
|
\n\
|
||||||
-- Insert schema metadata\n\
|
-- Insert schema metadata\n\
|
||||||
INSERT INTO schema_info (key, value) VALUES\n\
|
INSERT INTO schema_info (key, value) VALUES\n\
|
||||||
('version', '7'),\n\
|
('version', '8'),\n\
|
||||||
('description', 'Hybrid Nostr relay schema with event-based and table-based configuration'),\n\
|
('description', 'Hybrid Nostr relay schema with subscription deduplication support'),\n\
|
||||||
('created_at', strftime('%s', 'now'));\n\
|
('created_at', strftime('%s', 'now'));\n\
|
||||||
\n\
|
\n\
|
||||||
-- Helper views for common queries\n\
|
-- Helper views for common queries\n\
|
||||||
@@ -142,8 +142,6 @@ CREATE TABLE auth_rules (\n\
|
|||||||
rule_type TEXT NOT NULL CHECK (rule_type IN ('whitelist', 'blacklist', 'rate_limit', 'auth_required')),\n\
|
rule_type TEXT NOT NULL CHECK (rule_type IN ('whitelist', 'blacklist', 'rate_limit', 'auth_required')),\n\
|
||||||
pattern_type TEXT NOT NULL CHECK (pattern_type IN ('pubkey', 'kind', 'ip', 'global')),\n\
|
pattern_type TEXT NOT NULL CHECK (pattern_type IN ('pubkey', 'kind', 'ip', 'global')),\n\
|
||||||
pattern_value TEXT,\n\
|
pattern_value TEXT,\n\
|
||||||
action TEXT NOT NULL CHECK (action IN ('allow', 'deny', 'require_auth', 'rate_limit')),\n\
|
|
||||||
parameters TEXT, -- JSON parameters for rate limiting, etc.\n\
|
|
||||||
active INTEGER NOT NULL DEFAULT 1,\n\
|
active INTEGER NOT NULL DEFAULT 1,\n\
|
||||||
created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now')),\n\
|
created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now')),\n\
|
||||||
updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now'))\n\
|
updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now'))\n\
|
||||||
@@ -183,17 +181,19 @@ END;\n\
 -- Persistent Subscriptions Logging Tables (Phase 2)\n\
 -- Optional database logging for subscription analytics and debugging\n\
 \n\
--- Subscription events log\n\
-CREATE TABLE subscription_events (\n\
+-- Subscriptions log (renamed from subscription_events for clarity)\n\
+CREATE TABLE subscriptions (\n\
     id INTEGER PRIMARY KEY AUTOINCREMENT,\n\
     subscription_id TEXT NOT NULL, -- Subscription ID from client\n\
+    wsi_pointer TEXT NOT NULL, -- WebSocket pointer address (hex string)\n\
     client_ip TEXT NOT NULL, -- Client IP address\n\
     event_type TEXT NOT NULL CHECK (event_type IN ('created', 'closed', 'expired', 'disconnected')),\n\
     filter_json TEXT, -- JSON representation of filters (for created events)\n\
     events_sent INTEGER DEFAULT 0, -- Number of events sent to this subscription\n\
     created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now')),\n\
     ended_at INTEGER, -- When subscription ended (for closed/expired/disconnected)\n\
-    duration INTEGER -- Computed: ended_at - created_at\n\
+    duration INTEGER, -- Computed: ended_at - created_at\n\
+    UNIQUE(subscription_id, wsi_pointer) -- Prevent duplicate subscriptions per connection\n\
 );\n\
 \n\
 -- Subscription metrics summary\n\
@@ -220,10 +220,11 @@ CREATE TABLE event_broadcasts (\n\
|
|||||||
);\n\
|
);\n\
|
||||||
\n\
|
\n\
|
||||||
-- Indexes for subscription logging performance\n\
|
-- Indexes for subscription logging performance\n\
|
||||||
CREATE INDEX idx_subscription_events_id ON subscription_events(subscription_id);\n\
|
CREATE INDEX idx_subscriptions_id ON subscriptions(subscription_id);\n\
|
||||||
CREATE INDEX idx_subscription_events_type ON subscription_events(event_type);\n\
|
CREATE INDEX idx_subscriptions_type ON subscriptions(event_type);\n\
|
||||||
CREATE INDEX idx_subscription_events_created ON subscription_events(created_at DESC);\n\
|
CREATE INDEX idx_subscriptions_created ON subscriptions(created_at DESC);\n\
|
||||||
CREATE INDEX idx_subscription_events_client ON subscription_events(client_ip);\n\
|
CREATE INDEX idx_subscriptions_client ON subscriptions(client_ip);\n\
|
||||||
|
CREATE INDEX idx_subscriptions_wsi ON subscriptions(wsi_pointer);\n\
|
||||||
\n\
|
\n\
|
||||||
CREATE INDEX idx_subscription_metrics_date ON subscription_metrics(date DESC);\n\
|
CREATE INDEX idx_subscription_metrics_date ON subscription_metrics(date DESC);\n\
|
||||||
\n\
|
\n\
|
||||||
@@ -233,10 +234,10 @@ CREATE INDEX idx_event_broadcasts_time ON event_broadcasts(broadcast_at DESC);\n
|
|||||||
\n\
|
\n\
|
||||||
-- Trigger to update subscription duration when ended\n\
|
-- Trigger to update subscription duration when ended\n\
|
||||||
CREATE TRIGGER update_subscription_duration\n\
|
CREATE TRIGGER update_subscription_duration\n\
|
||||||
AFTER UPDATE OF ended_at ON subscription_events\n\
|
AFTER UPDATE OF ended_at ON subscriptions\n\
|
||||||
WHEN NEW.ended_at IS NOT NULL AND OLD.ended_at IS NULL\n\
|
WHEN NEW.ended_at IS NOT NULL AND OLD.ended_at IS NULL\n\
|
||||||
BEGIN\n\
|
BEGIN\n\
|
||||||
UPDATE subscription_events\n\
|
UPDATE subscriptions\n\
|
||||||
SET duration = NEW.ended_at - NEW.created_at\n\
|
SET duration = NEW.ended_at - NEW.created_at\n\
|
||||||
WHERE id = NEW.id;\n\
|
WHERE id = NEW.id;\n\
|
||||||
END;\n\
|
END;\n\
|
||||||
@@ -251,7 +252,7 @@ SELECT\n\
|
|||||||
MAX(events_sent) as max_events_sent,\n\
|
MAX(events_sent) as max_events_sent,\n\
|
||||||
AVG(events_sent) as avg_events_sent,\n\
|
AVG(events_sent) as avg_events_sent,\n\
|
||||||
COUNT(DISTINCT client_ip) as unique_clients\n\
|
COUNT(DISTINCT client_ip) as unique_clients\n\
|
||||||
FROM subscription_events\n\
|
FROM subscriptions\n\
|
||||||
GROUP BY date(created_at, 'unixepoch')\n\
|
GROUP BY date(created_at, 'unixepoch')\n\
|
||||||
ORDER BY date DESC;\n\
|
ORDER BY date DESC;\n\
|
||||||
\n\
|
\n\
|
||||||
@@ -264,10 +265,10 @@ SELECT\n\
|
|||||||
events_sent,\n\
|
events_sent,\n\
|
||||||
created_at,\n\
|
created_at,\n\
|
||||||
(strftime('%s', 'now') - created_at) as duration_seconds\n\
|
(strftime('%s', 'now') - created_at) as duration_seconds\n\
|
||||||
FROM subscription_events\n\
|
FROM subscriptions\n\
|
||||||
WHERE event_type = 'created'\n\
|
WHERE event_type = 'created'\n\
|
||||||
AND subscription_id NOT IN (\n\
|
AND subscription_id NOT IN (\n\
|
||||||
SELECT subscription_id FROM subscription_events\n\
|
SELECT subscription_id FROM subscriptions\n\
|
||||||
WHERE event_type IN ('closed', 'expired', 'disconnected')\n\
|
WHERE event_type IN ('closed', 'expired', 'disconnected')\n\
|
||||||
);\n\
|
);\n\
|
||||||
\n\
|
\n\
|
||||||
|
|||||||
@@ -25,6 +25,9 @@ int validate_timestamp_range(long since, long until, char* error_message, size_t
|
|||||||
int validate_numeric_limits(int limit, char* error_message, size_t error_size);
|
int validate_numeric_limits(int limit, char* error_message, size_t error_size);
|
||||||
int validate_search_term(const char* search_term, char* error_message, size_t error_size);
|
int validate_search_term(const char* search_term, char* error_message, size_t error_size);
|
||||||
|
|
||||||
|
// Forward declaration for monitoring function
|
||||||
|
void monitoring_on_subscription_change(void);
|
||||||
|
|
||||||
// Global database variable
|
// Global database variable
|
||||||
extern sqlite3* g_db;
|
extern sqlite3* g_db;
|
||||||
|
|
||||||
@@ -123,7 +126,7 @@ void free_subscription_filter(subscription_filter_t* filter) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Validate subscription ID format and length
|
// Validate subscription ID format and length
|
||||||
static int validate_subscription_id(const char* sub_id) {
|
int validate_subscription_id(const char* sub_id) {
|
||||||
if (!sub_id) {
|
if (!sub_id) {
|
||||||
return 0; // NULL pointer
|
return 0; // NULL pointer
|
||||||
}
|
}
|
||||||
@@ -133,11 +136,11 @@ static int validate_subscription_id(const char* sub_id) {
|
|||||||
return 0; // Empty or too long
|
return 0; // Empty or too long
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check for valid characters (alphanumeric, underscore, hyphen, colon)
|
// Check for valid characters (alphanumeric, underscore, hyphen, colon, comma)
|
||||||
for (size_t i = 0; i < len; i++) {
|
for (size_t i = 0; i < len; i++) {
|
||||||
char c = sub_id[i];
|
char c = sub_id[i];
|
||||||
if (!((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') ||
|
if (!((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') ||
|
||||||
(c >= '0' && c <= '9') || c == '_' || c == '-' || c == ':')) {
|
(c >= '0' && c <= '9') || c == '_' || c == '-' || c == ':' || c == ',')) {
|
||||||
return 0; // Invalid character
|
return 0; // Invalid character
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -238,27 +241,81 @@ void free_subscription(subscription_t* sub) {
|
|||||||
// Add subscription to global manager (thread-safe)
|
// Add subscription to global manager (thread-safe)
|
||||||
int add_subscription_to_manager(subscription_t* sub) {
|
int add_subscription_to_manager(subscription_t* sub) {
|
||||||
if (!sub) return -1;
|
if (!sub) return -1;
|
||||||
|
|
||||||
pthread_mutex_lock(&g_subscription_manager.subscriptions_lock);
|
pthread_mutex_lock(&g_subscription_manager.subscriptions_lock);
|
||||||
|
|
||||||
// Check global limits
|
// Check for existing subscription with same ID and WebSocket connection
|
||||||
if (g_subscription_manager.total_subscriptions >= g_subscription_manager.max_total_subscriptions) {
|
// Remove it first to prevent duplicates (implements subscription replacement per NIP-01)
|
||||||
|
subscription_t** current = &g_subscription_manager.active_subscriptions;
|
||||||
|
int found_duplicate = 0;
|
||||||
|
subscription_t* duplicate_old = NULL;
|
||||||
|
|
||||||
|
while (*current) {
|
||||||
|
subscription_t* existing = *current;
|
||||||
|
|
||||||
|
// Match by subscription ID and WebSocket pointer
|
||||||
|
if (strcmp(existing->id, sub->id) == 0 && existing->wsi == sub->wsi) {
|
||||||
|
// Found duplicate: mark inactive and unlink from global list under lock
|
||||||
|
existing->active = 0;
|
||||||
|
*current = existing->next;
|
||||||
|
g_subscription_manager.total_subscriptions--;
|
||||||
|
found_duplicate = 1;
|
||||||
|
duplicate_old = existing; // defer free until after per-session unlink
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
current = &(existing->next);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check global limits (only if not replacing an existing subscription)
|
||||||
|
if (!found_duplicate && g_subscription_manager.total_subscriptions >= g_subscription_manager.max_total_subscriptions) {
|
||||||
pthread_mutex_unlock(&g_subscription_manager.subscriptions_lock);
|
pthread_mutex_unlock(&g_subscription_manager.subscriptions_lock);
|
||||||
DEBUG_ERROR("Maximum total subscriptions reached");
|
DEBUG_ERROR("Maximum total subscriptions reached");
|
||||||
return -1;
|
return -1;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add to global list
|
// Add to global list
|
||||||
sub->next = g_subscription_manager.active_subscriptions;
|
sub->next = g_subscription_manager.active_subscriptions;
|
||||||
g_subscription_manager.active_subscriptions = sub;
|
g_subscription_manager.active_subscriptions = sub;
|
||||||
g_subscription_manager.total_subscriptions++;
|
g_subscription_manager.total_subscriptions++;
|
||||||
g_subscription_manager.total_created++;
|
|
||||||
|
// Only increment total_created if this is a new subscription (not a replacement)
|
||||||
|
if (!found_duplicate) {
|
||||||
|
g_subscription_manager.total_created++;
|
||||||
|
}
|
||||||
|
|
||||||
pthread_mutex_unlock(&g_subscription_manager.subscriptions_lock);
|
pthread_mutex_unlock(&g_subscription_manager.subscriptions_lock);
|
||||||
|
|
||||||
// Log subscription creation to database
|
// If we replaced an existing subscription, unlink it from the per-session list before freeing
|
||||||
|
if (duplicate_old) {
|
||||||
|
// Obtain per-session data for this wsi
|
||||||
|
struct per_session_data* pss = (struct per_session_data*) lws_wsi_user(duplicate_old->wsi);
|
||||||
|
if (pss) {
|
||||||
|
pthread_mutex_lock(&pss->session_lock);
|
||||||
|
struct subscription** scur = &pss->subscriptions;
|
||||||
|
while (*scur) {
|
||||||
|
if (*scur == duplicate_old) {
|
||||||
|
// Unlink by pointer identity to avoid removing the newly-added one
|
||||||
|
*scur = duplicate_old->session_next;
|
||||||
|
if (pss->subscription_count > 0) {
|
||||||
|
pss->subscription_count--;
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
scur = &((*scur)->session_next);
|
||||||
|
}
|
||||||
|
pthread_mutex_unlock(&pss->session_lock);
|
||||||
|
}
|
||||||
|
// Now safe to free the old subscription
|
||||||
|
free_subscription(duplicate_old);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Log subscription creation to database (INSERT OR REPLACE handles duplicates)
|
||||||
log_subscription_created(sub);
|
log_subscription_created(sub);
|
||||||
|
|
||||||
|
// Trigger monitoring update for subscription changes
|
||||||
|
monitoring_on_subscription_change();
|
||||||
|
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -306,6 +363,9 @@ int remove_subscription_from_manager(const char* sub_id, struct lws* wsi) {
|
|||||||
// Update events sent counter before freeing
|
// Update events sent counter before freeing
|
||||||
update_subscription_events_sent(sub_id_copy, events_sent_copy);
|
update_subscription_events_sent(sub_id_copy, events_sent_copy);
|
||||||
|
|
||||||
|
// Trigger monitoring update for subscription changes
|
||||||
|
monitoring_on_subscription_change();
|
||||||
|
|
||||||
free_subscription(sub);
|
free_subscription(sub);
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
@@ -324,10 +384,7 @@ int remove_subscription_from_manager(const char* sub_id, struct lws* wsi) {
|
|||||||
|
|
||||||
// Check if an event matches a subscription filter
|
// Check if an event matches a subscription filter
|
||||||
int event_matches_filter(cJSON* event, subscription_filter_t* filter) {
|
int event_matches_filter(cJSON* event, subscription_filter_t* filter) {
|
||||||
DEBUG_TRACE("Checking event against subscription filter");
|
|
||||||
|
|
||||||
if (!event || !filter) {
|
if (!event || !filter) {
|
||||||
DEBUG_TRACE("Exiting event_matches_filter - null parameters");
|
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -503,7 +560,6 @@ int event_matches_filter(cJSON* event, subscription_filter_t* filter) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
DEBUG_TRACE("Exiting event_matches_filter - match found");
|
|
||||||
return 1; // All filters passed
|
return 1; // All filters passed
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -526,10 +582,7 @@ int event_matches_subscription(cJSON* event, subscription_t* subscription) {
|
|||||||
|
|
||||||
// Broadcast event to all matching subscriptions (thread-safe)
|
// Broadcast event to all matching subscriptions (thread-safe)
|
||||||
int broadcast_event_to_subscriptions(cJSON* event) {
|
int broadcast_event_to_subscriptions(cJSON* event) {
|
||||||
DEBUG_TRACE("Broadcasting event to subscriptions");
|
|
||||||
|
|
||||||
if (!event) {
|
if (!event) {
|
||||||
DEBUG_TRACE("Exiting broadcast_event_to_subscriptions - null event");
|
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -611,12 +664,19 @@ int broadcast_event_to_subscriptions(cJSON* event) {
|
|||||||
if (buf) {
|
if (buf) {
|
||||||
memcpy(buf + LWS_PRE, msg_str, msg_len);
|
memcpy(buf + LWS_PRE, msg_str, msg_len);
|
||||||
|
|
||||||
// Send to WebSocket connection with error checking
|
// DEBUG: Log WebSocket frame details before sending
|
||||||
// Note: lws_write can fail if connection is closed, but won't crash
|
DEBUG_TRACE("WS_FRAME_SEND: type=EVENT sub=%s len=%zu data=%.100s%s",
|
||||||
int write_result = lws_write(current_temp->wsi, buf + LWS_PRE, msg_len, LWS_WRITE_TEXT);
|
current_temp->id,
|
||||||
if (write_result >= 0) {
|
msg_len,
|
||||||
|
msg_str,
|
||||||
|
msg_len > 100 ? "..." : "");
|
||||||
|
|
||||||
|
// Queue message for proper libwebsockets pattern
|
||||||
|
struct per_session_data* pss = (struct per_session_data*)lws_wsi_user(current_temp->wsi);
|
||||||
|
if (queue_message(current_temp->wsi, pss, msg_str, msg_len, LWS_WRITE_TEXT) == 0) {
|
||||||
|
// Message queued successfully
|
||||||
broadcasts++;
|
broadcasts++;
|
||||||
|
|
||||||
// Update events sent counter for this subscription
|
// Update events sent counter for this subscription
|
||||||
pthread_mutex_lock(&g_subscription_manager.subscriptions_lock);
|
pthread_mutex_lock(&g_subscription_manager.subscriptions_lock);
|
||||||
subscription_t* update_sub = g_subscription_manager.active_subscriptions;
|
subscription_t* update_sub = g_subscription_manager.active_subscriptions;
|
||||||
@@ -630,12 +690,14 @@ int broadcast_event_to_subscriptions(cJSON* event) {
|
|||||||
update_sub = update_sub->next;
|
update_sub = update_sub->next;
|
||||||
}
|
}
|
||||||
pthread_mutex_unlock(&g_subscription_manager.subscriptions_lock);
|
pthread_mutex_unlock(&g_subscription_manager.subscriptions_lock);
|
||||||
|
|
||||||
// Log event broadcast to database (optional - can be disabled for performance)
|
// Log event broadcast to database (optional - can be disabled for performance)
|
||||||
cJSON* event_id_obj = cJSON_GetObjectItem(event, "id");
|
cJSON* event_id_obj = cJSON_GetObjectItem(event, "id");
|
||||||
if (event_id_obj && cJSON_IsString(event_id_obj)) {
|
if (event_id_obj && cJSON_IsString(event_id_obj)) {
|
||||||
log_event_broadcast(cJSON_GetStringValue(event_id_obj), current_temp->id, current_temp->client_ip);
|
log_event_broadcast(cJSON_GetStringValue(event_id_obj), current_temp->id, current_temp->client_ip);
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
|
DEBUG_ERROR("Failed to queue EVENT message for sub=%s", current_temp->id);
|
||||||
}
|
}
|
||||||
|
|
||||||
free(buf);
|
free(buf);
|
||||||
@@ -660,10 +722,41 @@ int broadcast_event_to_subscriptions(cJSON* event) {
|
|||||||
pthread_mutex_unlock(&g_subscription_manager.subscriptions_lock);
|
pthread_mutex_unlock(&g_subscription_manager.subscriptions_lock);
|
||||||
|
|
||||||
DEBUG_LOG("Event broadcast complete: %d subscriptions matched", broadcasts);
|
DEBUG_LOG("Event broadcast complete: %d subscriptions matched", broadcasts);
|
||||||
DEBUG_TRACE("Exiting broadcast_event_to_subscriptions");
|
|
||||||
return broadcasts;
|
return broadcasts;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Check if any active subscription exists for a specific event kind (thread-safe)
|
||||||
|
int has_subscriptions_for_kind(int event_kind) {
|
||||||
|
pthread_mutex_lock(&g_subscription_manager.subscriptions_lock);
|
||||||
|
|
||||||
|
subscription_t* sub = g_subscription_manager.active_subscriptions;
|
||||||
|
while (sub) {
|
||||||
|
if (sub->active && sub->filters) {
|
||||||
|
subscription_filter_t* filter = sub->filters;
|
||||||
|
while (filter) {
|
||||||
|
// Check if this filter includes our event kind
|
||||||
|
if (filter->kinds && cJSON_IsArray(filter->kinds)) {
|
||||||
|
cJSON* kind_item = NULL;
|
||||||
|
cJSON_ArrayForEach(kind_item, filter->kinds) {
|
||||||
|
if (cJSON_IsNumber(kind_item)) {
|
||||||
|
int filter_kind = (int)cJSON_GetNumberValue(kind_item);
|
||||||
|
if (filter_kind == event_kind) {
|
||||||
|
pthread_mutex_unlock(&g_subscription_manager.subscriptions_lock);
|
||||||
|
return 1; // Found matching subscription
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
filter = filter->next;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
sub = sub->next;
|
||||||
|
}
|
||||||
|
|
||||||
|
pthread_mutex_unlock(&g_subscription_manager.subscriptions_lock);
|
||||||
|
return 0; // No matching subscriptions
|
||||||
|
}
|
||||||
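Because has_subscriptions_for_kind() walks every active filter while holding the manager lock, it is meant as a cheap gate before heavier per-event work. A hypothetical caller, illustrative only and not part of this change:

/* Illustrative only: skip serialization and broadcast entirely when no active
 * subscription filter lists this kind. */
static void broadcast_if_wanted(cJSON* event, int event_kind)
{
    if (!has_subscriptions_for_kind(event_kind)) {
        return;  /* no subscriber asked for this kind */
    }
    broadcast_event_to_subscriptions(event);
}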
|
|
||||||
|
|
||||||
/////////////////////////////////////////////////////////////////////////////////////////
|
/////////////////////////////////////////////////////////////////////////////////////////
|
||||||
/////////////////////////////////////////////////////////////////////////////////////////
|
/////////////////////////////////////////////////////////////////////////////////////////
|
||||||
@@ -675,6 +768,10 @@ int broadcast_event_to_subscriptions(cJSON* event) {
|
|||||||
void log_subscription_created(const subscription_t* sub) {
|
void log_subscription_created(const subscription_t* sub) {
|
||||||
if (!g_db || !sub) return;
|
if (!g_db || !sub) return;
|
||||||
|
|
||||||
|
// Convert wsi pointer to string
|
||||||
|
char wsi_str[32];
|
||||||
|
snprintf(wsi_str, sizeof(wsi_str), "%p", (void*)sub->wsi);
|
||||||
|
|
||||||
// Create filter JSON for logging
|
// Create filter JSON for logging
|
||||||
char* filter_json = NULL;
|
char* filter_json = NULL;
|
||||||
if (sub->filters) {
|
if (sub->filters) {
|
||||||
@@ -721,16 +818,18 @@ void log_subscription_created(const subscription_t* sub) {
|
|||||||
cJSON_Delete(filters_array);
|
cJSON_Delete(filters_array);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Use INSERT OR REPLACE to handle duplicates automatically
|
||||||
const char* sql =
|
const char* sql =
|
||||||
"INSERT INTO subscription_events (subscription_id, client_ip, event_type, filter_json) "
|
"INSERT OR REPLACE INTO subscriptions (subscription_id, wsi_pointer, client_ip, event_type, filter_json) "
|
||||||
"VALUES (?, ?, 'created', ?)";
|
"VALUES (?, ?, ?, 'created', ?)";
|
||||||
|
|
||||||
sqlite3_stmt* stmt;
|
sqlite3_stmt* stmt;
|
||||||
int rc = sqlite3_prepare_v2(g_db, sql, -1, &stmt, NULL);
|
int rc = sqlite3_prepare_v2(g_db, sql, -1, &stmt, NULL);
|
||||||
if (rc == SQLITE_OK) {
|
if (rc == SQLITE_OK) {
|
||||||
sqlite3_bind_text(stmt, 1, sub->id, -1, SQLITE_STATIC);
|
sqlite3_bind_text(stmt, 1, sub->id, -1, SQLITE_STATIC);
|
||||||
sqlite3_bind_text(stmt, 2, sub->client_ip, -1, SQLITE_STATIC);
|
sqlite3_bind_text(stmt, 2, wsi_str, -1, SQLITE_TRANSIENT);
|
||||||
sqlite3_bind_text(stmt, 3, filter_json ? filter_json : "[]", -1, SQLITE_TRANSIENT);
|
sqlite3_bind_text(stmt, 3, sub->client_ip, -1, SQLITE_STATIC);
|
||||||
|
sqlite3_bind_text(stmt, 4, filter_json ? filter_json : "[]", -1, SQLITE_TRANSIENT);
|
||||||
|
|
||||||
sqlite3_step(stmt);
|
sqlite3_step(stmt);
|
||||||
sqlite3_finalize(stmt);
|
sqlite3_finalize(stmt);
|
||||||
@@ -745,8 +844,8 @@ void log_subscription_closed(const char* sub_id, const char* client_ip, const ch
|
|||||||
if (!g_db || !sub_id) return;
|
if (!g_db || !sub_id) return;
|
||||||
|
|
||||||
const char* sql =
|
const char* sql =
|
||||||
"INSERT INTO subscription_events (subscription_id, client_ip, event_type) "
|
"INSERT INTO subscriptions (subscription_id, wsi_pointer, client_ip, event_type) "
|
||||||
"VALUES (?, ?, 'closed')";
|
"VALUES (?, '', ?, 'closed')";
|
||||||
|
|
||||||
sqlite3_stmt* stmt;
|
sqlite3_stmt* stmt;
|
||||||
int rc = sqlite3_prepare_v2(g_db, sql, -1, &stmt, NULL);
|
int rc = sqlite3_prepare_v2(g_db, sql, -1, &stmt, NULL);
|
||||||
@@ -760,7 +859,7 @@ void log_subscription_closed(const char* sub_id, const char* client_ip, const ch
|
|||||||
|
|
||||||
// Update the corresponding 'created' entry with end time and events sent
|
// Update the corresponding 'created' entry with end time and events sent
|
||||||
const char* update_sql =
|
const char* update_sql =
|
||||||
"UPDATE subscription_events "
|
"UPDATE subscriptions "
|
||||||
"SET ended_at = strftime('%s', 'now') "
|
"SET ended_at = strftime('%s', 'now') "
|
||||||
"WHERE subscription_id = ? AND event_type = 'created' AND ended_at IS NULL";
|
"WHERE subscription_id = ? AND event_type = 'created' AND ended_at IS NULL";
|
||||||
|
|
||||||
@@ -778,7 +877,7 @@ void log_subscription_disconnected(const char* client_ip) {
|
|||||||
|
|
||||||
// Mark all active subscriptions for this client as disconnected
|
// Mark all active subscriptions for this client as disconnected
|
||||||
const char* sql =
|
const char* sql =
|
||||||
"UPDATE subscription_events "
|
"UPDATE subscriptions "
|
||||||
"SET ended_at = strftime('%s', 'now') "
|
"SET ended_at = strftime('%s', 'now') "
|
||||||
"WHERE client_ip = ? AND event_type = 'created' AND ended_at IS NULL";
|
"WHERE client_ip = ? AND event_type = 'created' AND ended_at IS NULL";
|
||||||
|
|
||||||
@@ -793,8 +892,8 @@ void log_subscription_disconnected(const char* client_ip) {
|
|||||||
if (changes > 0) {
|
if (changes > 0) {
|
||||||
// Log a disconnection event
|
// Log a disconnection event
|
||||||
const char* insert_sql =
|
const char* insert_sql =
|
||||||
"INSERT INTO subscription_events (subscription_id, client_ip, event_type) "
|
"INSERT INTO subscriptions (subscription_id, wsi_pointer, client_ip, event_type) "
|
||||||
"VALUES ('disconnect', ?, 'disconnected')";
|
"VALUES ('disconnect', '', ?, 'disconnected')";
|
||||||
|
|
||||||
rc = sqlite3_prepare_v2(g_db, insert_sql, -1, &stmt, NULL);
|
rc = sqlite3_prepare_v2(g_db, insert_sql, -1, &stmt, NULL);
|
||||||
if (rc == SQLITE_OK) {
|
if (rc == SQLITE_OK) {
|
||||||
@@ -831,7 +930,7 @@ void update_subscription_events_sent(const char* sub_id, int events_sent) {
|
|||||||
if (!g_db || !sub_id) return;
|
if (!g_db || !sub_id) return;
|
||||||
|
|
||||||
const char* sql =
|
const char* sql =
|
||||||
"UPDATE subscription_events "
|
"UPDATE subscriptions "
|
||||||
"SET events_sent = ? "
|
"SET events_sent = ? "
|
||||||
"WHERE subscription_id = ? AND event_type = 'created'";
|
"WHERE subscription_id = ? AND event_type = 'created'";
|
||||||
|
|
||||||
|
|||||||
@@ -93,6 +93,7 @@ struct subscription_manager {
|
|||||||
};
|
};
|
||||||
|
|
||||||
// Function declarations
|
// Function declarations
|
||||||
|
int validate_subscription_id(const char* sub_id);
|
||||||
subscription_filter_t* create_subscription_filter(cJSON* filter_json);
|
subscription_filter_t* create_subscription_filter(cJSON* filter_json);
|
||||||
void free_subscription_filter(subscription_filter_t* filter);
|
void free_subscription_filter(subscription_filter_t* filter);
|
||||||
subscription_t* create_subscription(const char* sub_id, struct lws* wsi, cJSON* filters_array, const char* client_ip);
|
subscription_t* create_subscription(const char* sub_id, struct lws* wsi, cJSON* filters_array, const char* client_ip);
|
||||||
@@ -117,4 +118,7 @@ void log_subscription_disconnected(const char* client_ip);
|
|||||||
void log_event_broadcast(const char* event_id, const char* sub_id, const char* client_ip);
|
void log_event_broadcast(const char* event_id, const char* sub_id, const char* client_ip);
|
||||||
void update_subscription_events_sent(const char* sub_id, int events_sent);
|
void update_subscription_events_sent(const char* sub_id, int events_sent);
|
||||||
|
|
||||||
|
// Subscription query functions
|
||||||
|
int has_subscriptions_for_kind(int event_kind);
|
||||||
|
|
||||||
#endif // SUBSCRIPTIONS_H
|
#endif // SUBSCRIPTIONS_H
|
||||||
src/websockets.c (442 changed lines)
@@ -108,6 +108,136 @@ struct subscription_manager g_subscription_manager;
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
// Message queue functions for proper libwebsockets pattern
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Queue a message for WebSocket writing following libwebsockets' proper pattern.
|
||||||
|
* This function adds messages to a per-session queue and requests writeable callback.
|
||||||
|
*
|
||||||
|
* @param wsi WebSocket instance
|
||||||
|
* @param pss Per-session data containing message queue
|
||||||
|
* @param message Message string to write
|
||||||
|
* @param length Length of message string
|
||||||
|
* @param type LWS_WRITE_* type (LWS_WRITE_TEXT, etc.)
|
||||||
|
* @return 0 on success, -1 on error
|
||||||
|
*/
|
||||||
|
int queue_message(struct lws* wsi, struct per_session_data* pss, const char* message, size_t length, enum lws_write_protocol type) {
|
||||||
|
if (!wsi || !pss || !message || length == 0) {
|
||||||
|
DEBUG_ERROR("queue_message: invalid parameters");
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Allocate message queue node
|
||||||
|
struct message_queue_node* node = malloc(sizeof(struct message_queue_node));
|
||||||
|
if (!node) {
|
||||||
|
DEBUG_ERROR("queue_message: failed to allocate queue node");
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Allocate buffer with LWS_PRE space
|
||||||
|
size_t buffer_size = LWS_PRE + length;
|
||||||
|
unsigned char* buffer = malloc(buffer_size);
|
||||||
|
if (!buffer) {
|
||||||
|
DEBUG_ERROR("queue_message: failed to allocate message buffer");
|
||||||
|
free(node);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Copy message to buffer with LWS_PRE offset
|
||||||
|
memcpy(buffer + LWS_PRE, message, length);
|
||||||
|
|
||||||
|
// Initialize node
|
||||||
|
node->data = buffer;
|
||||||
|
node->length = length;
|
||||||
|
node->type = type;
|
||||||
|
node->next = NULL;
|
||||||
|
|
||||||
|
// Add to queue (thread-safe)
|
||||||
|
pthread_mutex_lock(&pss->session_lock);
|
||||||
|
|
||||||
|
if (!pss->message_queue_head) {
|
||||||
|
// Queue was empty
|
||||||
|
pss->message_queue_head = node;
|
||||||
|
pss->message_queue_tail = node;
|
||||||
|
} else {
|
||||||
|
// Add to end of queue
|
||||||
|
pss->message_queue_tail->next = node;
|
||||||
|
pss->message_queue_tail = node;
|
||||||
|
}
|
||||||
|
pss->message_queue_count++;
|
||||||
|
|
||||||
|
pthread_mutex_unlock(&pss->session_lock);
|
||||||
|
|
||||||
|
// Request writeable callback (only if not already requested)
|
||||||
|
if (!pss->writeable_requested) {
|
||||||
|
pss->writeable_requested = 1;
|
||||||
|
lws_callback_on_writable(wsi);
|
||||||
|
}
|
||||||
|
|
||||||
|
DEBUG_TRACE("Queued message: len=%zu, queue_count=%d", length, pss->message_queue_count);
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process message queue when the socket becomes writeable.
|
||||||
|
* This function is called from LWS_CALLBACK_SERVER_WRITEABLE.
|
||||||
|
*
|
||||||
|
* @param wsi WebSocket instance
|
||||||
|
* @param pss Per-session data containing message queue
|
||||||
|
* @return 0 on success, -1 on error
|
||||||
|
*/
|
||||||
|
int process_message_queue(struct lws* wsi, struct per_session_data* pss) {
|
||||||
|
if (!wsi || !pss) {
|
||||||
|
DEBUG_ERROR("process_message_queue: invalid parameters");
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get next message from queue (thread-safe)
|
||||||
|
pthread_mutex_lock(&pss->session_lock);
|
||||||
|
|
||||||
|
struct message_queue_node* node = pss->message_queue_head;
|
||||||
|
if (!node) {
|
||||||
|
// Queue is empty
|
||||||
|
pss->writeable_requested = 0;
|
||||||
|
pthread_mutex_unlock(&pss->session_lock);
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove from queue
|
||||||
|
pss->message_queue_head = node->next;
|
||||||
|
if (!pss->message_queue_head) {
|
||||||
|
pss->message_queue_tail = NULL;
|
||||||
|
}
|
||||||
|
pss->message_queue_count--;
|
||||||
|
|
||||||
|
pthread_mutex_unlock(&pss->session_lock);
|
||||||
|
|
||||||
|
// Write message (libwebsockets handles partial writes internally)
|
||||||
|
int write_result = lws_write(wsi, node->data + LWS_PRE, node->length, node->type);
|
||||||
|
|
||||||
|
// Free node resources
|
||||||
|
free(node->data);
|
||||||
|
free(node);
|
||||||
|
|
||||||
|
if (write_result < 0) {
|
||||||
|
DEBUG_ERROR("process_message_queue: write failed, result=%d", write_result);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
DEBUG_TRACE("Processed message: wrote %d bytes, remaining in queue: %d", write_result, pss->message_queue_count);
|
||||||
|
|
||||||
|
// If queue not empty, request another callback
|
||||||
|
pthread_mutex_lock(&pss->session_lock);
|
||||||
|
if (pss->message_queue_head) {
|
||||||
|
lws_callback_on_writable(wsi);
|
||||||
|
} else {
|
||||||
|
pss->writeable_requested = 0;
|
||||||
|
}
|
||||||
|
pthread_mutex_unlock(&pss->session_lock);
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
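Together, queue_message() and process_message_queue() implement the usual libwebsockets write discipline: producers never call lws_write() directly, they enqueue and request a writeable callback, and the callback drains one frame per invocation, re-arming itself while the queue is non-empty. A small sketch of the callback side, assuming the declarations already present in this file (the real integration appears in nostr_relay_callback() below):

/* Sketch only: how LWS_CALLBACK_SERVER_WRITEABLE is expected to drain the queue. */
static int on_server_writeable(struct lws* wsi, struct per_session_data* pss)
{
    if (!pss) {
        return 0;                            /* no session state, nothing queued */
    }
    return process_message_queue(wsi, pss);  /* writes one frame, re-arms if more remain */
}

Producers such as broadcast_event_to_subscriptions() then only ever call queue_message() and rely on this path to perform the actual write.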
/////////////////////////////////////////////////////////////////////////////////////////
|
/////////////////////////////////////////////////////////////////////////////////////////
|
||||||
/////////////////////////////////////////////////////////////////////////////////////////
|
/////////////////////////////////////////////////////////////////////////////////////////
|
||||||
// WEBSOCKET PROTOCOL
|
// WEBSOCKET PROTOCOL
|
||||||
@@ -247,7 +377,57 @@ static int nostr_relay_callback(struct lws *wsi, enum lws_callback_reasons reaso
|
|||||||
|
|
||||||
// Get real client IP address
|
// Get real client IP address
|
||||||
char client_ip[CLIENT_IP_MAX_LENGTH];
|
char client_ip[CLIENT_IP_MAX_LENGTH];
|
||||||
lws_get_peer_simple(wsi, client_ip, sizeof(client_ip));
|
memset(client_ip, 0, sizeof(client_ip));
|
||||||
|
|
||||||
|
// Check if we should trust proxy headers
|
||||||
|
int trust_proxy = get_config_bool("trust_proxy_headers", 0);
|
||||||
|
|
||||||
|
if (trust_proxy) {
|
||||||
|
// Try to get IP from X-Forwarded-For header first
|
||||||
|
char x_forwarded_for[CLIENT_IP_MAX_LENGTH];
|
||||||
|
int header_len = lws_hdr_copy(wsi, x_forwarded_for, sizeof(x_forwarded_for) - 1, WSI_TOKEN_X_FORWARDED_FOR);
|
||||||
|
|
||||||
|
if (header_len > 0) {
|
||||||
|
x_forwarded_for[header_len] = '\0';
|
||||||
|
// X-Forwarded-For can contain multiple IPs (client, proxy1, proxy2, ...)
|
||||||
|
// We want the first (leftmost) IP which is the original client
|
||||||
|
char* comma = strchr(x_forwarded_for, ',');
|
||||||
|
if (comma) {
|
||||||
|
*comma = '\0'; // Truncate at first comma
|
||||||
|
}
|
||||||
|
// Trim leading/trailing whitespace
|
||||||
|
char* ip_start = x_forwarded_for;
|
||||||
|
while (*ip_start == ' ' || *ip_start == '\t') ip_start++;
|
||||||
|
size_t ip_len = strlen(ip_start);
|
||||||
|
while (ip_len > 0 && (ip_start[ip_len-1] == ' ' || ip_start[ip_len-1] == '\t')) {
|
||||||
|
ip_start[--ip_len] = '\0';
|
||||||
|
}
|
||||||
|
if (ip_len > 0 && ip_len < CLIENT_IP_MAX_LENGTH) {
|
||||||
|
strncpy(client_ip, ip_start, CLIENT_IP_MAX_LENGTH - 1);
|
||||||
|
client_ip[CLIENT_IP_MAX_LENGTH - 1] = '\0';
|
||||||
|
DEBUG_TRACE("Using X-Forwarded-For IP: %s", client_ip);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If X-Forwarded-For didn't work, try X-Real-IP
|
||||||
|
if (client_ip[0] == '\0') {
|
||||||
|
char x_real_ip[CLIENT_IP_MAX_LENGTH];
|
||||||
|
header_len = lws_hdr_copy(wsi, x_real_ip, sizeof(x_real_ip) - 1, WSI_TOKEN_HTTP_X_REAL_IP);
|
||||||
|
|
||||||
|
if (header_len > 0) {
|
||||||
|
x_real_ip[header_len] = '\0';
|
||||||
|
strncpy(client_ip, x_real_ip, CLIENT_IP_MAX_LENGTH - 1);
|
||||||
|
client_ip[CLIENT_IP_MAX_LENGTH - 1] = '\0';
|
||||||
|
DEBUG_TRACE("Using X-Real-IP: %s", client_ip);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fall back to direct connection IP if proxy headers not available or not trusted
|
||||||
|
if (client_ip[0] == '\0') {
|
||||||
|
lws_get_peer_simple(wsi, client_ip, sizeof(client_ip));
|
||||||
|
DEBUG_TRACE("Using direct connection IP: %s", client_ip);
|
||||||
|
}
|
||||||
|
|
||||||
// Ensure client_ip is null-terminated and copy safely
|
// Ensure client_ip is null-terminated and copy safely
|
||||||
client_ip[CLIENT_IP_MAX_LENGTH - 1] = '\0';
|
client_ip[CLIENT_IP_MAX_LENGTH - 1] = '\0';
|
||||||
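The proxy-header handling above keeps only the left-most entry of X-Forwarded-For, which is the original client when each proxy appends its upstream address to the list. A self-contained sketch of the same extraction, with a hypothetical function name:

#include <string.h>

/* Illustrative only: copy the left-most (original client) address from an
 * X-Forwarded-For value such as "203.0.113.7, 10.0.0.2", trimming whitespace. */
static void first_forwarded_ip(const char* xff, char* out, size_t out_size)
{
    if (out_size == 0) return;
    const char* start = xff;
    while (*start == ' ' || *start == '\t') start++;               /* skip leading whitespace */
    size_t len = strcspn(start, ",");                              /* stop at the first comma */
    while (len > 0 && (start[len - 1] == ' ' || start[len - 1] == '\t')) len--;
    if (len >= out_size) len = out_size - 1;                       /* truncate defensively */
    memcpy(out, start, len);
    out[len] = '\0';
}

As in the diff, the header is only consulted when trust_proxy_headers is enabled, since clients can set it themselves on direct connections.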
@@ -256,6 +436,9 @@ static int nostr_relay_callback(struct lws *wsi, enum lws_callback_reasons reaso
|
|||||||
memcpy(pss->client_ip, client_ip, copy_len);
|
memcpy(pss->client_ip, client_ip, copy_len);
|
||||||
pss->client_ip[copy_len] = '\0';
|
pss->client_ip[copy_len] = '\0';
|
||||||
|
|
||||||
|
// Record connection establishment time for duration tracking
|
||||||
|
pss->connection_established = time(NULL);
|
||||||
|
|
||||||
DEBUG_LOG("WebSocket connection established from %s", pss->client_ip);
|
DEBUG_LOG("WebSocket connection established from %s", pss->client_ip);
|
||||||
|
|
||||||
// Initialize NIP-42 authentication state
|
// Initialize NIP-42 authentication state
|
||||||
@@ -516,7 +699,7 @@ static int nostr_relay_callback(struct lws *wsi, enum lws_callback_reasons reaso
|
|||||||
if (event_kind == 23456) {
|
if (event_kind == 23456) {
|
||||||
if (admin_result != 0) {
|
if (admin_result != 0) {
|
||||||
char error_result_msg[512];
|
char error_result_msg[512];
|
||||||
if (admin_error && strlen(admin_error) > 0) {
|
if (strlen(admin_error) > 0) {
|
||||||
// Safely truncate admin_error if too long
|
// Safely truncate admin_error if too long
|
||||||
size_t max_error_len = sizeof(error_result_msg) - 50; // Leave room for prefix
|
size_t max_error_len = sizeof(error_result_msg) - 50; // Leave room for prefix
|
||||||
size_t error_len = strlen(admin_error);
|
size_t error_len = strlen(admin_error);
|
||||||
@@ -532,7 +715,12 @@ static int nostr_relay_callback(struct lws *wsi, enum lws_callback_reasons reaso
|
|||||||
"ERROR: Kind %d event processing failed: ", event_kind);
|
"ERROR: Kind %d event processing failed: ", event_kind);
|
||||||
if (prefix_len < sizeof(error_result_msg)) {
|
if (prefix_len < sizeof(error_result_msg)) {
|
||||||
size_t remaining = sizeof(error_result_msg) - prefix_len;
|
size_t remaining = sizeof(error_result_msg) - prefix_len;
|
||||||
strncat(error_result_msg, truncated_error, remaining - 1);
|
size_t copy_len = strlen(truncated_error);
|
||||||
|
if (copy_len >= remaining) {
|
||||||
|
copy_len = remaining - 1;
|
||||||
|
}
|
||||||
|
memcpy(error_result_msg + prefix_len, truncated_error, copy_len);
|
||||||
|
error_result_msg[prefix_len + copy_len] = '\0';
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
snprintf(error_result_msg, sizeof(error_result_msg),
|
snprintf(error_result_msg, sizeof(error_result_msg),
|
||||||
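The memcpy-based replacement above makes the copy bound explicit instead of relying on strncat, whose size argument limits the number of characters appended rather than the destination size. The same idea as a self-contained helper (the name is hypothetical, not the relay's code):

#include <string.h>

/* Illustrative bounded append: copies at most as many bytes as fit in dst
 * after prefix_len and always NUL-terminates. */
static void append_bounded(char* dst, size_t dst_size, size_t prefix_len, const char* src)
{
    if (prefix_len >= dst_size) return;        /* no room left at all */
    size_t remaining = dst_size - prefix_len;  /* includes space for the NUL */
    size_t copy_len = strlen(src);
    if (copy_len >= remaining) copy_len = remaining - 1;
    memcpy(dst + prefix_len, src, copy_len);
    dst[prefix_len + copy_len] = '\0';
}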
@@ -620,16 +808,24 @@ static int nostr_relay_callback(struct lws *wsi, enum lws_callback_reasons reaso
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
DEBUG_TRACE("Storing regular event in database");
|
// Check if this is an ephemeral event (kinds 20000-29999)
|
||||||
// Regular event - store in database and broadcast
|
// Per NIP-01: ephemeral events are broadcast but never stored
|
||||||
if (store_event(event) != 0) {
|
if (event_kind >= 20000 && event_kind < 30000) {
|
||||||
DEBUG_ERROR("Failed to store event in database");
|
DEBUG_TRACE("Ephemeral event (kind %d) - broadcasting without storage", event_kind);
|
||||||
result = -1;
|
// Broadcast directly to subscriptions without database storage
|
||||||
strncpy(error_message, "error: failed to store event", sizeof(error_message) - 1);
|
|
||||||
} else {
|
|
||||||
DEBUG_LOG("Event stored and broadcast (kind %d)", event_kind);
|
|
||||||
// Broadcast event to matching persistent subscriptions
|
|
||||||
broadcast_event_to_subscriptions(event);
|
broadcast_event_to_subscriptions(event);
|
||||||
|
} else {
|
||||||
|
DEBUG_TRACE("Storing regular event in database");
|
||||||
|
// Regular event - store in database and broadcast
|
||||||
|
if (store_event(event) != 0) {
|
||||||
|
DEBUG_ERROR("Failed to store event in database");
|
||||||
|
result = -1;
|
||||||
|
strncpy(error_message, "error: failed to store event", sizeof(error_message) - 1);
|
||||||
|
} else {
|
||||||
|
DEBUG_LOG("Event stored and broadcast (kind %d)", event_kind);
|
||||||
|
// Broadcast event to matching persistent subscriptions
|
||||||
|
broadcast_event_to_subscriptions(event);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
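The kind range tested above comes from NIP-01, which defines kinds 20000-29999 as ephemeral: relays broadcast them to matching subscriptions but never store them. A tiny helper makes the intent explicit (illustrative only, not part of the diff):

#include <stdbool.h>

/* Illustrative only: NIP-01 ephemeral kind range check. */
static bool is_ephemeral_kind(int kind)
{
    return kind >= 20000 && kind < 30000;
}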
@@ -653,16 +849,22 @@ static int nostr_relay_callback(struct lws *wsi, enum lws_callback_reasons reaso
|
|||||||
cJSON_AddItemToArray(response, cJSON_CreateString(cJSON_GetStringValue(event_id)));
|
cJSON_AddItemToArray(response, cJSON_CreateString(cJSON_GetStringValue(event_id)));
|
||||||
cJSON_AddItemToArray(response, cJSON_CreateBool(result == 0));
|
cJSON_AddItemToArray(response, cJSON_CreateBool(result == 0));
|
||||||
cJSON_AddItemToArray(response, cJSON_CreateString(strlen(error_message) > 0 ? error_message : ""));
|
cJSON_AddItemToArray(response, cJSON_CreateString(strlen(error_message) > 0 ? error_message : ""));
|
||||||
|
|
||||||
char *response_str = cJSON_Print(response);
|
char *response_str = cJSON_Print(response);
|
||||||
if (response_str) {
|
if (response_str) {
|
||||||
size_t response_len = strlen(response_str);
|
size_t response_len = strlen(response_str);
|
||||||
unsigned char *buf = malloc(LWS_PRE + response_len);
|
|
||||||
if (buf) {
|
// DEBUG: Log WebSocket frame details before sending
|
||||||
memcpy(buf + LWS_PRE, response_str, response_len);
|
DEBUG_TRACE("WS_FRAME_SEND: type=OK len=%zu data=%.100s%s",
|
||||||
lws_write(wsi, buf + LWS_PRE, response_len, LWS_WRITE_TEXT);
|
response_len,
|
||||||
free(buf);
|
response_str,
|
||||||
|
response_len > 100 ? "..." : "");
|
||||||
|
|
||||||
|
// Queue message for proper libwebsockets pattern
|
||||||
|
if (queue_message(wsi, pss, response_str, response_len, LWS_WRITE_TEXT) != 0) {
|
||||||
|
DEBUG_ERROR("Failed to queue OK response message");
|
||||||
}
|
}
|
||||||
|
|
||||||
free(response_str);
|
free(response_str);
|
||||||
}
|
}
|
||||||
cJSON_Delete(response);
|
cJSON_Delete(response);
|
||||||
@@ -702,38 +904,10 @@ static int nostr_relay_callback(struct lws *wsi, enum lws_callback_reasons reaso
|
|||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check subscription ID format and length
|
// Validate subscription ID
|
||||||
size_t id_len = strlen(subscription_id);
|
if (!validate_subscription_id(subscription_id)) {
|
||||||
if (id_len == 0 || id_len >= SUBSCRIPTION_ID_MAX_LENGTH) {
|
send_notice_message(wsi, "error: invalid subscription ID");
|
||||||
send_notice_message(wsi, "error: subscription ID too long or empty");
|
DEBUG_WARN("REQ rejected: invalid subscription ID");
|
||||||
DEBUG_WARN("REQ rejected: invalid subscription ID length");
|
|
||||||
cJSON_Delete(json);
|
|
||||||
free(message);
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate characters in subscription ID
|
|
||||||
int valid_id = 1;
|
|
||||||
char invalid_char = '\0';
|
|
||||||
size_t invalid_pos = 0;
|
|
||||||
for (size_t i = 0; i < id_len; i++) {
|
|
||||||
char c = subscription_id[i];
|
|
||||||
if (!((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') ||
|
|
||||||
(c >= '0' && c <= '9') || c == '_' || c == '-' || c == ':')) {
|
|
||||||
valid_id = 0;
|
|
||||||
invalid_char = c;
|
|
||||||
invalid_pos = i;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!valid_id) {
|
|
||||||
char debug_msg[512];
|
|
||||||
snprintf(debug_msg, sizeof(debug_msg),
|
|
||||||
"REQ rejected: invalid character '%c' (0x%02X) at position %zu in subscription ID: '%s'",
|
|
||||||
invalid_char, (unsigned char)invalid_char, invalid_pos, subscription_id);
|
|
||||||
DEBUG_WARN(debug_msg);
|
|
||||||
send_notice_message(wsi, "error: invalid characters in subscription ID");
|
|
||||||
cJSON_Delete(json);
|
cJSON_Delete(json);
|
||||||
free(message);
|
free(message);
|
||||||
return 0;
|
return 0;
|
||||||
@@ -785,12 +959,18 @@ static int nostr_relay_callback(struct lws *wsi, enum lws_callback_reasons reaso
|
|||||||
char *eose_str = cJSON_Print(eose_response);
|
char *eose_str = cJSON_Print(eose_response);
|
||||||
if (eose_str) {
|
if (eose_str) {
|
||||||
size_t eose_len = strlen(eose_str);
|
size_t eose_len = strlen(eose_str);
|
||||||
unsigned char *buf = malloc(LWS_PRE + eose_len);
|
|
||||||
if (buf) {
|
// DEBUG: Log WebSocket frame details before sending
|
||||||
memcpy(buf + LWS_PRE, eose_str, eose_len);
|
DEBUG_TRACE("WS_FRAME_SEND: type=EOSE len=%zu data=%.100s%s",
|
||||||
lws_write(wsi, buf + LWS_PRE, eose_len, LWS_WRITE_TEXT);
|
eose_len,
|
||||||
free(buf);
|
eose_str,
|
||||||
|
eose_len > 100 ? "..." : "");
|
||||||
|
|
||||||
|
// Queue message for proper libwebsockets pattern
|
||||||
|
if (queue_message(wsi, pss, eose_str, eose_len, LWS_WRITE_TEXT) != 0) {
|
||||||
|
DEBUG_ERROR("Failed to queue EOSE message");
|
||||||
}
|
}
|
||||||
|
|
||||||
free(eose_str);
|
free(eose_str);
|
||||||
}
|
}
|
||||||
cJSON_Delete(eose_response);
|
cJSON_Delete(eose_response);
|
||||||
@@ -861,39 +1041,31 @@ static int nostr_relay_callback(struct lws *wsi, enum lws_callback_reasons reaso
                 return 0;
             }

-            // Check subscription ID format and length
-            size_t id_len = strlen(subscription_id);
-            if (id_len == 0 || id_len >= SUBSCRIPTION_ID_MAX_LENGTH) {
-                send_notice_message(wsi, "error: subscription ID too long or empty in CLOSE");
-                DEBUG_WARN("CLOSE rejected: invalid subscription ID length");
+            // Validate subscription ID
+            if (!validate_subscription_id(subscription_id)) {
+                send_notice_message(wsi, "error: invalid subscription ID in CLOSE");
+                DEBUG_WARN("CLOSE rejected: invalid subscription ID");
                 cJSON_Delete(json);
                 free(message);
                 return 0;
             }

-            // Validate characters in subscription ID
-            int valid_id = 1;
-            for (size_t i = 0; i < id_len; i++) {
-                char c = subscription_id[i];
-                if (!((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') ||
-                      (c >= '0' && c <= '9') || c == '_' || c == '-' || c == ':')) {
-                    valid_id = 0;
-                    break;
-                }
-            }
-
-            if (!valid_id) {
-                send_notice_message(wsi, "error: invalid characters in subscription ID for CLOSE");
-                DEBUG_WARN("CLOSE rejected: invalid characters in subscription ID");
-                cJSON_Delete(json);
-                free(message);
-                return 0;
-            }
-
-            // Remove from global manager
-            remove_subscription_from_manager(subscription_id, wsi);
-
-            // Remove from session list if present
+            // CRITICAL FIX: Mark subscription as inactive in global manager FIRST
+            // This prevents other threads from accessing it during removal
+            pthread_mutex_lock(&g_subscription_manager.subscriptions_lock);
+
+            subscription_t* target_sub = g_subscription_manager.active_subscriptions;
+            while (target_sub) {
+                if (strcmp(target_sub->id, subscription_id) == 0 && target_sub->wsi == wsi) {
+                    target_sub->active = 0; // Mark as inactive immediately
+                    break;
+                }
+                target_sub = target_sub->next;
+            }
+
+            pthread_mutex_unlock(&g_subscription_manager.subscriptions_lock);
+
+            // Now safe to remove from session list
             if (pss) {
                 pthread_mutex_lock(&pss->session_lock);

@@ -911,6 +1083,9 @@ static int nostr_relay_callback(struct lws *wsi, enum lws_callback_reasons reaso
                 pthread_mutex_unlock(&pss->session_lock);
             }

+            // Finally remove from global manager (which will free it)
+            remove_subscription_from_manager(subscription_id, wsi);
+
             // Subscription closed
         } else {
             send_notice_message(wsi, "error: missing or invalid subscription ID in CLOSE");
@@ -951,26 +1126,109 @@ static int nostr_relay_callback(struct lws *wsi, enum lws_callback_reasons reaso
            }
            break;

+        case LWS_CALLBACK_SERVER_WRITEABLE:
+            // Handle message queue when socket becomes writeable
+            if (pss) {
+                process_message_queue(wsi, pss);
+            }
+            break;
+
        case LWS_CALLBACK_CLOSED:
            DEBUG_TRACE("WebSocket connection closed");
-            DEBUG_LOG("WebSocket connection closed from %s", pss ? pss->client_ip : "unknown");
-            // Clean up session subscriptions
+            // Enhanced closure logging with detailed diagnostics
            if (pss) {
+                // Calculate connection duration
+                time_t now = time(NULL);
+                long duration = (pss->connection_established > 0) ?
+                                (long)(now - pss->connection_established) : 0;
+
+                // Determine closure reason
+                const char* reason = "client_disconnect";
+                if (g_shutdown_flag || !g_server_running) {
+                    reason = "server_shutdown";
+                }
+
+                // Format authentication status
+                char auth_status[80];
+                if (pss->authenticated && strlen(pss->authenticated_pubkey) > 0) {
+                    // Show first 8 chars of pubkey for identification
+                    snprintf(auth_status, sizeof(auth_status), "yes(%.8s...)", pss->authenticated_pubkey);
+                } else {
+                    snprintf(auth_status, sizeof(auth_status), "no");
+                }
+
+                // Log comprehensive closure information
+                DEBUG_LOG("WebSocket CLOSED: ip=%s duration=%lds subscriptions=%d authenticated=%s reason=%s",
+                          pss->client_ip,
+                          duration,
+                          pss->subscription_count,
+                          auth_status,
+                          reason);
+
+                // Clean up message queue to prevent memory leaks
+                while (pss->message_queue_head) {
+                    struct message_queue_node* node = pss->message_queue_head;
+                    pss->message_queue_head = node->next;
+                    free(node->data);
+                    free(node);
+                }
+                pss->message_queue_tail = NULL;
+                pss->message_queue_count = 0;
+                pss->writeable_requested = 0;
+
+                // Clean up session subscriptions - copy IDs first to avoid use-after-free
                pthread_mutex_lock(&pss->session_lock);
+
+                // First pass: collect subscription IDs safely
+                typedef struct temp_sub_id {
+                    char id[SUBSCRIPTION_ID_MAX_LENGTH];
+                    struct temp_sub_id* next;
+                } temp_sub_id_t;
+
+                temp_sub_id_t* temp_ids = NULL;
+                temp_sub_id_t* temp_tail = NULL;
+                int temp_count = 0;
+
                struct subscription* sub = pss->subscriptions;
                while (sub) {
-                    struct subscription* next = sub->session_next;
-                    remove_subscription_from_manager(sub->id, wsi);
-                    sub = next;
+                    if (sub->active) { // Only process active subscriptions
+                        temp_sub_id_t* temp = malloc(sizeof(temp_sub_id_t));
+                        if (temp) {
+                            memcpy(temp->id, sub->id, SUBSCRIPTION_ID_MAX_LENGTH);
+                            temp->id[SUBSCRIPTION_ID_MAX_LENGTH - 1] = '\0';
+                            temp->next = NULL;
+
+                            if (!temp_ids) {
+                                temp_ids = temp;
+                                temp_tail = temp;
+                            } else {
+                                temp_tail->next = temp;
+                                temp_tail = temp;
+                            }
+                            temp_count++;
+                        }
+                    }
+                    sub = sub->session_next;
                }
+
+                // Clear session list immediately
                pss->subscriptions = NULL;
                pss->subscription_count = 0;

                pthread_mutex_unlock(&pss->session_lock);
+
+                // Second pass: remove from global manager using copied IDs
+                temp_sub_id_t* current_temp = temp_ids;
+                while (current_temp) {
+                    temp_sub_id_t* next_temp = current_temp->next;
+                    remove_subscription_from_manager(current_temp->id, wsi);
+                    free(current_temp);
+                    current_temp = next_temp;
+                }
                pthread_mutex_destroy(&pss->session_lock);
+            } else {
+                DEBUG_LOG("WebSocket CLOSED: ip=unknown duration=0s subscriptions=0 authenticated=no reason=unknown");
            }
            DEBUG_TRACE("WebSocket connection cleanup complete");
            break;
@@ -1634,12 +1892,18 @@ int handle_count_message(const char* sub_id, cJSON* filters, struct lws *wsi, st
    char *count_str = cJSON_Print(count_response);
    if (count_str) {
        size_t count_len = strlen(count_str);
-        unsigned char *buf = malloc(LWS_PRE + count_len);
-        if (buf) {
-            memcpy(buf + LWS_PRE, count_str, count_len);
-            lws_write(wsi, buf + LWS_PRE, count_len, LWS_WRITE_TEXT);
-            free(buf);
+        // DEBUG: Log WebSocket frame details before sending
+        DEBUG_TRACE("WS_FRAME_SEND: type=COUNT len=%zu data=%.100s%s",
+                    count_len,
+                    count_str,
+                    count_len > 100 ? "..." : "");
+
+        // Queue message for proper libwebsockets pattern
+        if (queue_message(wsi, pss, count_str, count_len, LWS_WRITE_TEXT) != 0) {
+            DEBUG_ERROR("Failed to queue COUNT message");
        }
        free(count_str);
    }
    cJSON_Delete(count_response);
@@ -31,6 +31,14 @@
 #define MAX_SEARCH_LENGTH 256
 #define MAX_TAG_VALUE_LENGTH 1024

+// Message queue node for proper libwebsockets pattern
+struct message_queue_node {
+    unsigned char* data;                   // Message data (with LWS_PRE space)
+    size_t length;                         // Message length (without LWS_PRE)
+    enum lws_write_protocol type;          // LWS_WRITE_TEXT, etc.
+    struct message_queue_node* next;       // Next node in queue
+};
+
 // Enhanced per-session data with subscription management, NIP-42 authentication, and rate limiting
 struct per_session_data {
     int authenticated;
@@ -38,6 +46,7 @@ struct per_session_data {
     pthread_mutex_t session_lock;           // Per-session thread safety
     char client_ip[CLIENT_IP_MAX_LENGTH];   // Client IP for logging
     int subscription_count;                 // Number of subscriptions for this session
+    time_t connection_established;          // When WebSocket connection was established

     // NIP-42 Authentication State
     char authenticated_pubkey[65];          // Authenticated public key (64 hex + null)
@@ -58,6 +67,12 @@ struct per_session_data {
     int malformed_request_count;            // Count of malformed requests in current hour
     time_t malformed_request_window_start;  // Start of current hour window
     time_t malformed_request_blocked_until; // Time until blocked for malformed requests
+
+    // Message queue for proper libwebsockets pattern (replaces single buffer)
+    struct message_queue_node* message_queue_head; // Head of message queue
+    struct message_queue_node* message_queue_tail; // Tail of message queue
+    int message_queue_count;                // Number of messages in queue
+    int writeable_requested;                // Flag: 1 if writeable callback requested
 };

 // NIP-11 HTTP session data structure for managing buffer lifetime
@@ -72,6 +87,10 @@ struct nip11_session_data {
 // Function declarations
 int start_websocket_relay(int port_override, int strict_port);

+// Message queue functions for proper libwebsockets pattern
+int queue_message(struct lws* wsi, struct per_session_data* pss, const char* message, size_t length, enum lws_write_protocol type);
+int process_message_queue(struct lws* wsi, struct per_session_data* pss);
+
 // Auth rules checking function from request_validator.c
 int check_database_auth_rules(const char *pubkey, const char *operation, const char *resource_hash);

systemd/c-relay-local.service (new file, 40 lines)
@@ -0,0 +1,40 @@
[Unit]
Description=C Nostr Relay Server (Local Development)
Documentation=https://github.com/your-repo/c-relay
After=network.target
Wants=network-online.target

[Service]
Type=simple
User=teknari
WorkingDirectory=/home/teknari/Storage/c_relay
Environment=DEBUG_LEVEL=0
ExecStart=/home/teknari/Storage/c_relay/crelay --port 7777 --debug-level=$DEBUG_LEVEL
Restart=always
RestartSec=5
StandardOutput=journal
StandardError=journal
SyslogIdentifier=c-relay-local

# Security settings (relaxed for local development)
NoNewPrivileges=true
ProtectSystem=strict
ProtectHome=true
ReadWritePaths=/home/teknari/Storage/c_relay
PrivateTmp=true

# Network security
PrivateNetwork=false
RestrictAddressFamilies=AF_INET AF_INET6

# Resource limits
LimitNOFILE=65536
LimitNPROC=4096

# Event-based configuration system
# No environment variables needed - all configuration is stored as Nostr events
# Database files (<relay_pubkey>.db) are created automatically in WorkingDirectory
# Admin keys are generated and displayed only during first startup

[Install]
WantedBy=multi-user.target
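A quick usage sketch (not part of the commit): with the paths and user above, the unit would normally be installed with the standard systemd commands below. One caution: ProtectHome=true generally masks /home even when ReadWritePaths points inside it, so if the relay cannot create its database under the working directory, the hardening may need to be relaxed (for example ProtectHome=read-only) for this local unit.

    # assumed install steps for the local development unit
    sudo cp systemd/c-relay-local.service /etc/systemd/system/
    sudo systemctl daemon-reload
    sudo systemctl enable --now c-relay-local
    journalctl -u c-relay-local -f    # follow the relay's output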
tests/ephemeral_test.sh (new executable file, 35 lines)
@@ -0,0 +1,35 @@
#!/bin/bash

# Simplified Ephemeral Event Test
# Tests that ephemeral events are broadcast to active subscriptions

echo "=== Generating Ephemeral Event (kind 20000) ==="
event=$(nak event --kind 20000 --content "test ephemeral event")
echo "$event"
echo ""

echo "=== Testing Ephemeral Event Broadcast ==="
subscription='["REQ","test_sub",{"kinds":[20000],"limit":10}]'
echo "Subscription Filter:"
echo "$subscription"
echo ""

event_msg='["EVENT",'"$event"']'
echo "Event Message:"
echo "$event_msg"
echo ""

echo "=== Relay Responses ==="
(
    # Send subscription
    printf "%s\n" "$subscription"
    # Wait for subscription to establish
    sleep 1
    # Send ephemeral event on same connection
    printf "%s\n" "$event_msg"
    # Wait for responses
    sleep 2
) | timeout 5 websocat ws://127.0.0.1:8888

echo ""
echo "Test complete!"
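As a follow-up check (a sketch, not part of the script): kind 20000 falls in the ephemeral range, so querying the relay again after the broadcast should return nothing but EOSE if ephemeral events are not being stored. Assuming nak and the same local relay address:

    # should come back empty (only EOSE) when ephemeral events are not persisted
    nak req -k 20000 --limit 10 ws://127.0.0.1:8888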
tests/large_event_test.sh (new executable file, 63 lines)
@@ -0,0 +1,63 @@
#!/bin/bash

# Test script for posting large events (>4KB) to test partial write handling
# Uses nak to properly sign events with large content

RELAY_URL="ws://localhost:8888"

# Check if nak is installed
if ! command -v nak &> /dev/null; then
    echo "Error: nak is not installed. Install with: go install github.com/fiatjaf/nak@latest"
    exit 1
fi

# Generate a test private key if not set
if [ -z "$NOSTR_PRIVATE_KEY" ]; then
    echo "Generating temporary test key..."
    export NOSTR_PRIVATE_KEY=$(nak key generate)
fi

echo "=== Large Event Test ==="
echo "Testing partial write handling with events >4KB"
echo "Relay: $RELAY_URL"
echo ""

# Test 1: 5KB event
echo "Test 1: Posting 5KB event..."
CONTENT_5KB=$(python3 -c "print('A' * 5000)")
echo "$CONTENT_5KB" | nak event -k 1 --content - $RELAY_URL
sleep 1

# Test 2: 10KB event
echo ""
echo "Test 2: Posting 10KB event..."
CONTENT_10KB=$(python3 -c "print('B' * 10000)")
echo "$CONTENT_10KB" | nak event -k 1 --content - $RELAY_URL
sleep 1

# Test 3: 20KB event
echo ""
echo "Test 3: Posting 20KB event..."
CONTENT_20KB=$(python3 -c "print('C' * 20000)")
echo "$CONTENT_20KB" | nak event -k 1 --content - $RELAY_URL
sleep 1

# Test 4: 50KB event (very large)
echo ""
echo "Test 4: Posting 50KB event..."
CONTENT_50KB=$(python3 -c "print('D' * 50000)")
echo "$CONTENT_50KB" | nak event -k 1 --content - $RELAY_URL

echo ""
echo "=== Test Complete ==="
echo ""
echo "Check relay.log for:"
echo " - 'Queued partial write' messages (indicates buffering is working)"
echo " - 'write completed' messages (indicates retry succeeded)"
echo " - No 'Invalid frame header' errors"
echo ""
echo "To view logs in real-time:"
echo " tail -f relay.log | grep -E '(partial|write completed|Invalid frame)'"
echo ""
echo "To check if events were stored:"
echo " sqlite3 build/*.db 'SELECT id, length(content) as content_size FROM events ORDER BY created_at DESC LIMIT 4;'"
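If python3 is not available on the test machine, the padded payloads above can be produced with coreutils alone; a hedged equivalent for the first generator line:

    # 5000-byte payload of 'A' characters without python3
    CONTENT_5KB=$(head -c 5000 /dev/zero | tr '\0' 'A')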
tests/post_events.sh (new executable file, 53 lines)
@@ -0,0 +1,53 @@
#!/bin/bash

# Test script to post kind 1 events to the relay at a fixed short interval
# Cycles through three different secret keys
# Content includes current timestamp
#
# Usage: ./post_events.sh <relay_url>
# Example: ./post_events.sh ws://localhost:8888
# Example: ./post_events.sh wss://relay.laantungir.net

# Check if relay URL is provided
if [ -z "$1" ]; then
    echo "Error: Relay URL is required"
    echo "Usage: $0 <relay_url>"
    echo "Example: $0 ws://localhost:8888"
    echo "Example: $0 wss://relay.laantungir.net"
    exit 1
fi

# Array of secret keys to cycle through
SECRET_KEYS=(
    "3fdd8227a920c2385559400b2b14e464f22e80df312a73cc7a86e1d7e91d608f"
    "a156011cd65b71f84b4a488ac81687f2aed57e490b31c28f58195d787030db60"
    "1618aaa21f5bd45c5ffede0d9a60556db67d4a046900e5f66b0bae5c01c801fb"
)

RELAY_URL="$1"
KEY_INDEX=0

echo "Starting event posting test to $RELAY_URL"
echo "Press Ctrl+C to stop"

while true; do
    # Get current timestamp
    TIMESTAMP=$(date +"%Y-%m-%d %H:%M:%S UTC")

    # Get current secret key
    CURRENT_KEY=${SECRET_KEYS[$KEY_INDEX]}

    # Create content with timestamp
    CONTENT="Test event at $TIMESTAMP"

    echo "[$TIMESTAMP] Posting event with key ${KEY_INDEX}: ${CURRENT_KEY:0:16}..."

    # Post event using nak
    nak event -c "$CONTENT" --sec "$CURRENT_KEY" "$RELAY_URL"

    # Cycle to next key
    KEY_INDEX=$(( (KEY_INDEX + 1) % ${#SECRET_KEYS[@]} ))

    # Wait 0.2 seconds before the next event
    sleep .2
done
@@ -1,203 +0,0 @@ (deleted rate-limiting test script)
#!/bin/bash

# Rate Limiting Test Suite for C-Relay
# Tests rate limiting and abuse prevention mechanisms

set -e

# Configuration
RELAY_HOST="127.0.0.1"
RELAY_PORT="8888"
TEST_TIMEOUT=15

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Test counters
TOTAL_TESTS=0
PASSED_TESTS=0
FAILED_TESTS=0

# Function to test rate limiting
test_rate_limiting() {
    local description="$1"
    local message="$2"
    local burst_count="${3:-10}"
    local expected_limited="${4:-false}"

    TOTAL_TESTS=$((TOTAL_TESTS + 1))

    echo -n "Testing $description... "

    local rate_limited=false
    local success_count=0
    local error_count=0

    # Send burst of messages
    for i in $(seq 1 "$burst_count"); do
        local response
        response=$(echo "$message" | timeout 2 websocat -B 1048576 ws://$RELAY_HOST:$RELAY_PORT 2>/dev/null | head -1 || echo 'TIMEOUT')

        if [[ "$response" == *"rate limit"* ]] || [[ "$response" == *"too many"* ]] || [[ "$response" == *"TOO_MANY"* ]]; then
            rate_limited=true
        elif [[ "$response" == *"EOSE"* ]] || [[ "$response" == *"EVENT"* ]] || [[ "$response" == *"OK"* ]]; then
            ((success_count++))
        else
            ((error_count++))
        fi

        # Small delay between requests
        sleep 0.05
    done

    if [[ "$expected_limited" == "true" ]]; then
        if [[ "$rate_limited" == "true" ]]; then
            echo -e "${GREEN}PASSED${NC} - Rate limiting triggered as expected"
            PASSED_TESTS=$((PASSED_TESTS + 1))
            return 0
        else
            echo -e "${RED}FAILED${NC} - Rate limiting not triggered (expected)"
            FAILED_TESTS=$((FAILED_TESTS + 1))
            return 1
        fi
    else
        if [[ "$rate_limited" == "false" ]]; then
            echo -e "${GREEN}PASSED${NC} - No rate limiting for normal traffic"
            PASSED_TESTS=$((PASSED_TESTS + 1))
            return 0
        else
            echo -e "${YELLOW}UNCERTAIN${NC} - Unexpected rate limiting"
            PASSED_TESTS=$((PASSED_TESTS + 1)) # Count as passed since it's conservative
            return 0
        fi
    fi
}

# Function to test sustained load
test_sustained_load() {
    local description="$1"
    local message="$2"
    local duration="${3:-10}"

    TOTAL_TESTS=$((TOTAL_TESTS + 1))

    echo -n "Testing $description... "

    local start_time
    start_time=$(date +%s)
    local rate_limited=false
    local total_requests=0
    local successful_requests=0

    while [[ $(($(date +%s) - start_time)) -lt duration ]]; do
        ((total_requests++))
        local response
        response=$(echo "$message" | timeout 1 websocat -B 1048576 ws://$RELAY_HOST:$RELAY_PORT 2>/dev/null | head -1 || echo 'TIMEOUT')

        if [[ "$response" == *"rate limit"* ]] || [[ "$response" == *"too many"* ]] || [[ "$response" == *"TOO_MANY"* ]]; then
            rate_limited=true
        elif [[ "$response" == *"EOSE"* ]] || [[ "$response" == *"EVENT"* ]] || [[ "$response" == *"OK"* ]]; then
            ((successful_requests++))
        fi

        # Small delay to avoid overwhelming
        sleep 0.1
    done

    local success_rate=0
    if [[ $total_requests -gt 0 ]]; then
        success_rate=$((successful_requests * 100 / total_requests))
    fi

    if [[ "$rate_limited" == "true" ]]; then
        echo -e "${GREEN}PASSED${NC} - Rate limiting activated under sustained load (${success_rate}% success rate)"
        PASSED_TESTS=$((PASSED_TESTS + 1))
        return 0
    else
        echo -e "${YELLOW}UNCERTAIN${NC} - No rate limiting detected (${success_rate}% success rate)"
        # This might be acceptable if rate limiting is very permissive
        PASSED_TESTS=$((PASSED_TESTS + 1))
        return 0
    fi
}

echo "=========================================="
echo "C-Relay Rate Limiting Test Suite"
echo "=========================================="
echo "Testing rate limiting against relay at ws://$RELAY_HOST:$RELAY_PORT"
echo ""

# Test basic connectivity first
echo "=== Basic Connectivity Test ==="
test_rate_limiting "Basic connectivity" '["REQ","rate_test",{}]' 1 false
echo ""

echo "=== Burst Request Testing ==="
# Test rapid succession of requests
test_rate_limiting "Rapid REQ messages" '["REQ","burst_req_'$(date +%s%N)'",{}]' 20 true
test_rate_limiting "Rapid COUNT messages" '["COUNT","burst_count_'$(date +%s%N)'",{}]' 20 true
test_rate_limiting "Rapid CLOSE messages" '["CLOSE","burst_close"]' 20 true
echo ""

echo "=== Malformed Message Rate Limiting ==="
# Test if malformed messages trigger rate limiting faster
test_rate_limiting "Malformed JSON burst" '["REQ","malformed"' 15 true
test_rate_limiting "Invalid message type burst" '["INVALID","test",{}]' 15 true
test_rate_limiting "Empty message burst" '[]' 15 true
echo ""

echo "=== Sustained Load Testing ==="
# Test sustained moderate load
test_sustained_load "Sustained REQ load" '["REQ","sustained_'$(date +%s%N)'",{}]' 10
test_sustained_load "Sustained COUNT load" '["COUNT","sustained_count_'$(date +%s%N)'",{}]' 10
echo ""

echo "=== Filter Complexity Testing ==="
# Test if complex filters trigger rate limiting
test_rate_limiting "Complex filter burst" '["REQ","complex_'$(date +%s%N)'",{"authors":["a","b","c"],"kinds":[1,2,3],"#e":["x","y","z"],"#p":["m","n","o"],"since":1000000000,"until":2000000000,"limit":100}]' 10 true
echo ""

echo "=== Subscription Management Testing ==="
# Test subscription creation/deletion rate limiting
echo -n "Testing subscription churn... "
local churn_test_passed=true
for i in $(seq 1 25); do
    # Create subscription
    echo "[\"REQ\",\"churn_${i}_$(date +%s%N)\",{}]" | timeout 1 websocat -B 1048576 ws://$RELAY_HOST:$RELAY_PORT >/dev/null 2>&1 || true

    # Close subscription
    echo "[\"CLOSE\",\"churn_${i}_*\"]" | timeout 1 websocat -B 1048576 ws://$RELAY_HOST:$RELAY_PORT >/dev/null 2>&1 || true

    sleep 0.05
done

# Check if relay is still responsive
if echo 'ping' | timeout 2 websocat -n1 ws://$RELAY_HOST:$RELAY_PORT >/dev/null 2>&1; then
    echo -e "${GREEN}PASSED${NC} - Subscription churn handled"
    TOTAL_TESTS=$((TOTAL_TESTS + 1))
    PASSED_TESTS=$((PASSED_TESTS + 1))
else
    echo -e "${RED}FAILED${NC} - Relay unresponsive after subscription churn"
    TOTAL_TESTS=$((TOTAL_TESTS + 1))
    FAILED_TESTS=$((FAILED_TESTS + 1))
fi
echo ""

echo "=== Test Results ==="
echo "Total tests: $TOTAL_TESTS"
echo -e "Passed: ${GREEN}$PASSED_TESTS${NC}"
echo -e "Failed: ${RED}$FAILED_TESTS${NC}"

if [[ $FAILED_TESTS -eq 0 ]]; then
    echo -e "${GREEN}✓ All rate limiting tests passed!${NC}"
    echo "Rate limiting appears to be working correctly."
    exit 0
else
    echo -e "${RED}✗ Some rate limiting tests failed!${NC}"
    echo "Rate limiting may not be properly configured."
    exit 1
fi
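With this standalone rate-limiting suite removed, a minimal manual burst check is still possible using the same websocat approach the deleted script relied on (a sketch, assuming the relay listens on 127.0.0.1:8888):

    # fire 20 rapid REQs and watch for a rate-limit NOTICE in the replies
    for i in $(seq 1 20); do
        echo '["REQ","burst_'$i'",{}]' | timeout 1 websocat -B 1048576 ws://127.0.0.1:8888 | head -1
    done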
tests/sql_test.sh (new executable file, 448 lines)
@@ -0,0 +1,448 @@
#!/bin/bash

# SQL Query Admin API Test Script
# Tests the sql_query command functionality

set -e

# Configuration
RELAY_URL="ws://localhost:8888"
ADMIN_PRIVKEY="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
ADMIN_PUBKEY="6a04ab98d9e4774ad806e302dddeb63bea16b5cb5f223ee77478e861bb583eb3"
RELAY_PUBKEY="4f355bdcb7cc0af728ef3cceb9615d90684bb5b2ca5f859ab0f0b704075871aa"

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Test counters
TOTAL_TESTS=0
PASSED_TESTS=0
FAILED_TESTS=0

# Helper functions
print_test() {
    echo -e "${YELLOW}TEST: $1${NC}"
    TOTAL_TESTS=$((TOTAL_TESTS + 1))
}

print_pass() {
    echo -e "${GREEN}✓ PASS: $1${NC}"
    PASSED_TESTS=$((PASSED_TESTS + 1))
}

print_fail() {
    echo -e "${RED}✗ FAIL: $1${NC}"
    FAILED_TESTS=$((FAILED_TESTS + 1))
}

# Check if nak is installed
check_nak() {
    if ! command -v nak &> /dev/null; then
        echo -e "${RED}ERROR: nak command not found. Please install nak first.${NC}"
        echo -e "${RED}Visit: https://github.com/fiatjaf/nak${NC}"
        exit 1
    fi
    echo -e "${GREEN}✓ nak is available${NC}"
}

# Send SQL query command via WebSocket using nak
send_sql_query() {
    local query="$1"
    local description="$2"

    echo -n "Testing $description... "

    # Create the admin command
    COMMAND="[\"sql_query\", \"$query\"]"

    # Encrypt the command using NIP-44
    ENCRYPTED_COMMAND=$(nak encrypt "$COMMAND" \
        --sec "$ADMIN_PRIVKEY" \
        --recipient-pubkey "$RELAY_PUBKEY" 2>/dev/null)

    if [ -z "$ENCRYPTED_COMMAND" ]; then
        echo -e "${RED}FAILED${NC} - Failed to encrypt admin command"
        return 1
    fi

    # Create admin event
    ADMIN_EVENT=$(nak event \
        --kind 23456 \
        --content "$ENCRYPTED_COMMAND" \
        --sec "$ADMIN_PRIVKEY" \
        --tag "p=$RELAY_PUBKEY" 2>/dev/null)

    if [ -z "$ADMIN_EVENT" ]; then
        echo -e "${RED}FAILED${NC} - Failed to create admin event"
        return 1
    fi

    echo "=== SENT EVENT ==="
    echo "$ADMIN_EVENT"
    echo "==================="

    # Send SQL query event via WebSocket
    local response
    response=$(echo "$ADMIN_EVENT" | timeout 10 websocat -B 1048576 "$RELAY_URL" 2>/dev/null | head -3 || echo 'TIMEOUT')

    echo "=== RECEIVED RESPONSE ==="
    echo "$response"
    echo "=========================="

    if [[ "$response" == *"TIMEOUT"* ]]; then
        echo -e "${RED}FAILED${NC} - Connection timeout"
        return 1
    fi

    echo "$response" # Return the response for further processing
}

# Test functions
test_valid_select() {
    print_test "Valid SELECT query"
    local response=$(send_sql_query "SELECT * FROM events LIMIT 1" "valid SELECT query")

    if [[ "$response" == *"TIMEOUT"* ]]; then
        FAILED_TESTS=$((FAILED_TESTS + 1))
        return 1
    fi

    if echo "$response" | grep -q '"query_type":"sql_query"' && echo "$response" | grep -q '"row_count"'; then
        print_pass "Valid SELECT accepted and executed"
    else
        print_fail "Valid SELECT failed: $response"
    fi
}

test_select_count() {
    print_test "SELECT COUNT(*) query"
    local response=$(send_sql_query "SELECT COUNT(*) FROM events" "COUNT query")

    if [[ "$response" == *"TIMEOUT"* ]]; then
        FAILED_TESTS=$((FAILED_TESTS + 1))
        return 1
    fi

    if echo "$response" | grep -q '"query_type":"sql_query"' && echo "$response" | grep -q '"row_count"'; then
        print_pass "COUNT query executed successfully"
    else
        print_fail "COUNT query failed: $response"
    fi
}

test_blocked_insert() {
    print_test "INSERT statement blocked"
    local response=$(send_sql_query "INSERT INTO events VALUES ('id', 'pubkey', 1234567890, 1, 'content', 'sig')" "INSERT blocking")

    if [[ "$response" == *"TIMEOUT"* ]]; then
        FAILED_TESTS=$((FAILED_TESTS + 1))
        return 1
    fi

    if echo "$response" | grep -q '"status":"error"' && echo "$response" | grep -q '"error_type":"blocked_statement"'; then
        print_pass "INSERT correctly blocked"
    else
        print_fail "INSERT not blocked: $response"
    fi
}

test_blocked_update() {
    print_test "UPDATE statement blocked"
    local response=$(send_sql_query "UPDATE events SET content = 'test' WHERE id = 'abc123'" "UPDATE blocking")

    if [[ "$response" == *"TIMEOUT"* ]]; then
        FAILED_TESTS=$((FAILED_TESTS + 1))
        return 1
    fi

    if echo "$response" | grep -q '"status":"error"' && echo "$response" | grep -q '"error_type":"blocked_statement"'; then
        print_pass "UPDATE correctly blocked"
    else
        print_fail "UPDATE not blocked: $response"
    fi
}

test_blocked_delete() {
    print_test "DELETE statement blocked"
    local response=$(send_sql_query "DELETE FROM events WHERE id = 'abc123'" "DELETE blocking")

    if [[ "$response" == *"TIMEOUT"* ]]; then
        FAILED_TESTS=$((FAILED_TESTS + 1))
        return 1
    fi

    if echo "$response" | grep -q '"status":"error"' && echo "$response" | grep -q '"error_type":"blocked_statement"'; then
        print_pass "DELETE correctly blocked"
    else
        print_fail "DELETE not blocked: $response"
    fi
}

test_blocked_drop() {
    print_test "DROP statement blocked"
    local response=$(send_sql_query "DROP TABLE events" "DROP blocking")

    if [[ "$response" == *"TIMEOUT"* ]]; then
        FAILED_TESTS=$((FAILED_TESTS + 1))
        return 1
    fi

    if echo "$response" | grep -q '"status":"error"' && echo "$response" | grep -q '"error_type":"blocked_statement"'; then
        print_pass "DROP correctly blocked"
    else
        print_fail "DROP not blocked: $response"
    fi
}

test_blocked_create() {
    print_test "CREATE statement blocked"
    local response=$(send_sql_query "CREATE TABLE test (id TEXT)" "CREATE blocking")

    if [[ "$response" == *"TIMEOUT"* ]]; then
        FAILED_TESTS=$((FAILED_TESTS + 1))
        return 1
    fi

    if echo "$response" | grep -q '"status":"error"' && echo "$response" | grep -q '"error_type":"blocked_statement"'; then
        print_pass "CREATE correctly blocked"
    else
        print_fail "CREATE not blocked: $response"
    fi
}

test_blocked_alter() {
    print_test "ALTER statement blocked"
    local response=$(send_sql_query "ALTER TABLE events ADD COLUMN test TEXT" "ALTER blocking")

    if [[ "$response" == *"TIMEOUT"* ]]; then
        FAILED_TESTS=$((FAILED_TESTS + 1))
        return 1
    fi

    if echo "$response" | grep -q '"status":"error"' && echo "$response" | grep -q '"error_type":"blocked_statement"'; then
        print_pass "ALTER correctly blocked"
    else
        print_fail "ALTER not blocked: $response"
    fi
}

test_blocked_pragma() {
    print_test "PRAGMA statement blocked"
    local response=$(send_sql_query "PRAGMA table_info(events)" "PRAGMA blocking")

    if [[ "$response" == *"TIMEOUT"* ]]; then
        FAILED_TESTS=$((FAILED_TESTS + 1))
        return 1
    fi

    if echo "$response" | grep -q '"status":"error"' && echo "$response" | grep -q '"error_type":"blocked_statement"'; then
        print_pass "PRAGMA correctly blocked"
    else
        print_fail "PRAGMA not blocked: $response"
    fi
}

test_select_with_where() {
    print_test "SELECT with WHERE clause"
    local response=$(send_sql_query "SELECT id, kind FROM events WHERE kind = 1 LIMIT 5" "WHERE clause query")

    if [[ "$response" == *"TIMEOUT"* ]]; then
        FAILED_TESTS=$((FAILED_TESTS + 1))
        return 1
    fi

    if echo "$response" | grep -q '"query_type":"sql_query"'; then
        print_pass "WHERE clause query executed"
    else
        print_fail "WHERE clause query failed: $response"
    fi
}

test_select_with_join() {
    print_test "SELECT with JOIN"
    local response=$(send_sql_query "SELECT e.id, e.kind, s.events_sent FROM events e LEFT JOIN active_subscriptions_log s ON e.id = s.subscription_id LIMIT 3" "JOIN query")

    if [[ "$response" == *"TIMEOUT"* ]]; then
        FAILED_TESTS=$((FAILED_TESTS + 1))
        return 1
    fi

    if echo "$response" | grep -q '"query_type":"sql_query"'; then
        print_pass "JOIN query executed"
    else
        print_fail "JOIN query failed: $response"
    fi
}

test_select_views() {
    print_test "SELECT from views"
    local response=$(send_sql_query "SELECT * FROM event_kinds_view LIMIT 5" "view query")

    if [[ "$response" == *"TIMEOUT"* ]]; then
        FAILED_TESTS=$((FAILED_TESTS + 1))
        return 1
    fi

    if echo "$response" | grep -q '"query_type":"sql_query"'; then
        print_pass "View query executed"
    else
        print_fail "View query failed: $response"
    fi
}

test_nonexistent_table() {
    print_test "Query nonexistent table"
    local response=$(send_sql_query "SELECT * FROM nonexistent_table" "nonexistent table")

    if [[ "$response" == *"TIMEOUT"* ]]; then
        FAILED_TESTS=$((FAILED_TESTS + 1))
        return 1
    fi

    if echo "$response" | grep -q '"status":"error"'; then
        print_pass "Nonexistent table error handled correctly"
    else
        print_fail "Nonexistent table error not handled: $response"
    fi
}

test_invalid_syntax() {
    print_test "Invalid SQL syntax"
    local response=$(send_sql_query "SELECT * FROM events WHERE" "invalid syntax")

    if [[ "$response" == *"TIMEOUT"* ]]; then
        FAILED_TESTS=$((FAILED_TESTS + 1))
        return 1
    fi

    if echo "$response" | grep -q '"status":"error"'; then
        print_pass "Invalid syntax error handled"
    else
        print_fail "Invalid syntax not handled: $response"
    fi
}

test_request_id_correlation() {
    print_test "Request ID correlation"
    local response=$(send_sql_query "SELECT * FROM events LIMIT 1" "request ID correlation")

    if [[ "$response" == *"TIMEOUT"* ]]; then
        FAILED_TESTS=$((FAILED_TESTS + 1))
        return 1
    fi

    if echo "$response" | grep -q '"request_id"'; then
        print_pass "Request ID included in response"
    else
        print_fail "Request ID missing from response: $response"
    fi
}

test_response_format() {
    print_test "Response format validation"
    local response=$(send_sql_query "SELECT * FROM events LIMIT 1" "response format")

    if [[ "$response" == *"TIMEOUT"* ]]; then
        FAILED_TESTS=$((FAILED_TESTS + 1))
        return 1
    fi

    if echo "$response" | grep -q '"query_type":"sql_query"' &&
       echo "$response" | grep -q '"timestamp"' &&
       echo "$response" | grep -q '"execution_time_ms"' &&
       echo "$response" | grep -q '"row_count"' &&
       echo "$response" | grep -q '"columns"' &&
       echo "$response" | grep -q '"rows"'; then
        print_pass "Response format is valid"
    else
        print_fail "Response format invalid: $response"
    fi
}

test_empty_result() {
    print_test "Empty result set"
    local response=$(send_sql_query "SELECT * FROM events WHERE kind = 99999" "empty result")

    if [[ "$response" == *"TIMEOUT"* ]]; then
        FAILED_TESTS=$((FAILED_TESTS + 1))
        return 1
    fi

    if echo "$response" | grep -q '"query_type":"sql_query"'; then
        print_pass "Empty result handled correctly"
    else
        print_fail "Empty result not handled: $response"
    fi
}

echo "=========================================="
echo "C-Relay SQL Query Admin API Testing Suite"
echo "=========================================="
echo "Testing SQL query functionality at $RELAY_URL"
echo ""

# Check prerequisites
check_nak

# Test basic connectivity first
echo "=== Basic Connectivity Test ==="
print_test "Basic connectivity"
response=$(send_sql_query "SELECT 1" "basic connectivity")

if [[ "$response" == *"TIMEOUT"* ]]; then
    echo -e "${RED}FAILED${NC} - Cannot connect to relay at $RELAY_URL"
    echo "Make sure the relay is running and accessible."
    exit 1
else
    print_pass "Relay connection established"
fi
echo ""

# Run test suites
echo "=== Query Validation Tests ==="
test_valid_select
test_select_count
test_blocked_insert
test_blocked_update
test_blocked_delete
test_blocked_drop
test_blocked_create
test_blocked_alter
test_blocked_pragma
echo ""

echo "=== Query Execution Tests ==="
test_select_with_where
test_select_with_join
test_select_views
test_empty_result
echo ""

echo "=== Error Handling Tests ==="
test_nonexistent_table
test_invalid_syntax
echo ""

echo "=== Response Format Tests ==="
test_request_id_correlation
test_response_format
echo ""

echo "=== Test Results ==="
echo "Total tests: $TOTAL_TESTS"
echo -e "Passed: ${GREEN}$PASSED_TESTS${NC}"
echo -e "Failed: ${RED}$FAILED_TESTS${NC}"

if [[ $FAILED_TESTS -eq 0 ]]; then
    echo -e "${GREEN}✓ All SQL query tests passed!${NC}"
    echo "SQL query admin API is working correctly."
    exit 0
else
    echo -e "${RED}✗ Some SQL query tests failed!${NC}"
    echo "SQL query admin API may have issues."
    exit 1
fi
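For a one-off query outside the suite, the same round trip can be done by hand; this simply mirrors send_sql_query above and assumes ADMIN_PRIVKEY, RELAY_PUBKEY, and RELAY_URL are set to the same test values:

    # encrypt one sql_query command, wrap it in a kind-23456 admin event, and send it
    CMD='["sql_query", "SELECT COUNT(*) FROM events"]'
    ENC=$(nak encrypt "$CMD" --sec "$ADMIN_PRIVKEY" --recipient-pubkey "$RELAY_PUBKEY")
    nak event --kind 23456 --content "$ENC" --sec "$ADMIN_PRIVKEY" --tag "p=$RELAY_PUBKEY" \
        | timeout 10 websocat -B 1048576 "$RELAY_URL" | head -3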
@@ -1,18 +0,0 @@ (deleted test-runner log)
2025-10-11 13:46:17 - ==========================================
2025-10-11 13:46:17 - C-Relay Comprehensive Test Suite Runner
2025-10-11 13:46:17 - ==========================================
2025-10-11 13:46:17 - Relay URL: ws://127.0.0.1:8888
2025-10-11 13:46:17 - Log file: test_results_20251011_134617.log
2025-10-11 13:46:17 - Report file: test_report_20251011_134617.html
2025-10-11 13:46:17 -
2025-10-11 13:46:17 - Checking relay status at ws://127.0.0.1:8888...
2025-10-11 13:46:17 - ✓ Relay HTTP endpoint is accessible
2025-10-11 13:46:17 -
2025-10-11 13:46:17 - Starting comprehensive test execution...
2025-10-11 13:46:17 -
2025-10-11 13:46:17 - === SECURITY TEST SUITES ===
2025-10-11 13:46:17 - ==========================================
2025-10-11 13:46:17 - Running Test Suite: SQL Injection Tests
2025-10-11 13:46:17 - Description: Comprehensive SQL injection vulnerability testing
2025-10-11 13:46:17 - ==========================================
2025-10-11 13:46:17 - ERROR: Test script tests/sql_injection_tests.sh not found
@@ -1,629 +0,0 @@
|
|||||||
2025-10-11 13:48:07 - ==========================================
|
|
||||||
2025-10-11 13:48:07 - C-Relay Comprehensive Test Suite Runner
|
|
||||||
2025-10-11 13:48:07 - ==========================================
|
|
||||||
2025-10-11 13:48:07 - Relay URL: ws://127.0.0.1:8888
|
|
||||||
2025-10-11 13:48:07 - Log file: test_results_20251011_134807.log
|
|
||||||
2025-10-11 13:48:07 - Report file: test_report_20251011_134807.html
|
|
||||||
2025-10-11 13:48:07 -
|
|
||||||
2025-10-11 13:48:07 - Checking relay status at ws://127.0.0.1:8888...
|
|
||||||
2025-10-11 13:48:07 - \033[0;32m✓ Relay HTTP endpoint is accessible\033[0m
|
|
||||||
2025-10-11 13:48:07 -
|
|
||||||
2025-10-11 13:48:07 - Starting comprehensive test execution...
|
|
||||||
2025-10-11 13:48:07 -
|
|
||||||
2025-10-11 13:48:07 - \033[0;34m=== SECURITY TEST SUITES ===\033[0m
|
|
||||||
2025-10-11 13:48:07 - ==========================================
|
|
||||||
2025-10-11 13:48:07 - Running Test Suite: SQL Injection Tests
|
|
||||||
2025-10-11 13:48:07 - Description: Comprehensive SQL injection vulnerability testing
|
|
||||||
2025-10-11 13:48:07 - ==========================================
|
|
||||||
==========================================
|
|
||||||
C-Relay SQL Injection Test Suite
|
|
||||||
==========================================
|
|
||||||
Testing against relay at ws://127.0.0.1:8888
|
|
||||||
|
|
||||||
=== Basic Connectivity Test ===
|
|
||||||
Testing Basic connectivity... [0;32mPASSED[0m - Valid query works
|
|
||||||
|
|
||||||
=== Authors Filter SQL Injection Tests ===
|
|
||||||
Testing Authors filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
|
|
||||||
=== IDs Filter SQL Injection Tests ===
|
|
||||||
Testing IDs filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing IDs filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
|
|
||||||
=== Kinds Filter SQL Injection Tests ===
|
|
||||||
Testing Kinds filter with string injection... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Kinds filter with negative value... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Kinds filter with very large value... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
|
|
||||||
=== Search Filter SQL Injection Tests ===
|
|
||||||
Testing Search filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing Search filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing Search filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing Search filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing Search filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Search filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
=== Tag Filter SQL Injection Tests ===
|
|
||||||
Testing #e tag filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #e tag filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #p tag filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #t tag filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #r tag filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing #d tag filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
=== Timestamp Filter SQL Injection Tests ===
Testing Since parameter injection... PASSED - SQL injection blocked (rejected with error)
Testing Until parameter injection... PASSED - SQL injection blocked (rejected with error)

=== Limit Parameter SQL Injection Tests ===
Testing Limit parameter injection... PASSED - SQL injection blocked (rejected with error)
Testing Limit with UNION... PASSED - SQL injection blocked (rejected with error)

=== Complex Multi-Filter SQL Injection Tests ===
Testing Multi-filter with authors injection... PASSED - SQL injection blocked (rejected with error)
Testing Multi-filter with search injection... PASSED - SQL injection blocked (rejected with error)
Testing Multi-filter with tag injection... PASSED - SQL injection blocked (query sanitized)

=== COUNT Message SQL Injection Tests ===
Testing COUNT with authors payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing COUNT with authors payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: /*... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: /*... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: */... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: */... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: /**/... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: /**/... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: #... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: #... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing COUNT with authors payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing COUNT with authors payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (query sanitized)
|
|
||||||
Testing COUNT with authors payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with authors payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing COUNT with search payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
=== Edge Case SQL Injection Tests ===
Testing Empty string injection... PASSED - SQL injection blocked (rejected with error)
Testing Null byte injection... PASSED - SQL injection blocked (silently rejected)
Testing Unicode injection... PASSED - SQL injection blocked (rejected with error)
Testing Very long injection payload... PASSED - SQL injection blocked (rejected with error)

=== Subscription ID SQL Injection Tests ===
Testing Subscription ID injection... PASSED - SQL injection blocked (rejected with error)
Testing Subscription ID with quotes... PASSED - SQL injection blocked (silently rejected)

=== CLOSE Message SQL Injection Tests ===
Testing CLOSE with injection... PASSED - SQL injection blocked (rejected with error)

=== Test Results ===
Total tests: 318
Passed: 318
Failed: 0
✓ All SQL injection tests passed!
The relay appears to be protected against SQL injection attacks.
2025-10-11 13:48:30 - ✓ SQL Injection Tests PASSED (Duration: 23s)
2025-10-11 13:48:30 - ==========================================
2025-10-11 13:48:30 - Running Test Suite: Filter Validation Tests
2025-10-11 13:48:30 - Description: Input validation for REQ and COUNT messages
2025-10-11 13:48:30 - ==========================================
=== C-Relay Filter Validation Tests ===
Testing against relay at ws://127.0.0.1:8888
Testing Valid REQ message... PASSED
Testing Valid COUNT message... PASSED

=== Testing Filter Array Validation ===
Testing Non-object filter... PASSED
Testing Too many filters... PASSED

=== Testing Authors Validation ===
Testing Invalid author type... PASSED
Testing Invalid author hex... PASSED
Testing Too many authors... PASSED

=== Testing IDs Validation ===
Testing Invalid ID type... PASSED
Testing Invalid ID hex... PASSED
Testing Too many IDs... PASSED

=== Testing Kinds Validation ===
Testing Invalid kind type... PASSED
Testing Negative kind... PASSED
Testing Too large kind... PASSED
Testing Too many kinds... PASSED

=== Testing Timestamp Validation ===
Testing Invalid since type... PASSED
Testing Negative since... PASSED
Testing Invalid until type... PASSED
Testing Negative until... PASSED

=== Testing Limit Validation ===
Testing Invalid limit type... PASSED
Testing Negative limit... PASSED
Testing Too large limit... PASSED

=== Testing Search Validation ===
Testing Invalid search type... PASSED
Testing Search too long... PASSED
Testing Search SQL injection... PASSED

=== Testing Tag Filter Validation ===
Testing Invalid tag filter type... PASSED
Testing Too many tag values... PASSED
Testing Tag value too long... PASSED

=== Testing Rate Limiting ===
Testing rate limiting with malformed requests... UNCERTAIN - Rate limiting may not have triggered (this could be normal)

=== Test Results ===
Total tests: 28
Passed: 28
Failed: 0
All tests passed!
2025-10-11 13:48:35 - ✓ Filter Validation Tests PASSED (Duration: 5s)
2025-10-11 13:48:35 - ==========================================
2025-10-11 13:48:35 - Running Test Suite: Subscription Validation Tests
2025-10-11 13:48:35 - Description: Subscription ID and message validation
2025-10-11 13:48:35 - ==========================================
Testing subscription ID validation fixes...
Testing malformed subscription IDs...
Valid ID test: Success
Testing CLOSE message validation...
CLOSE valid ID test: Success
Subscription validation tests completed.
2025-10-11 13:48:36 - ✓ Subscription Validation Tests PASSED (Duration: 1s)
2025-10-11 13:48:36 - ==========================================
2025-10-11 13:48:36 - Running Test Suite: Memory Corruption Tests
2025-10-11 13:48:36 - Description: Buffer overflow and memory safety testing
2025-10-11 13:48:36 - ==========================================
==========================================
C-Relay Memory Corruption Test Suite
==========================================
Testing against relay at ws://127.0.0.1:8888
Note: These tests may cause the relay to crash if vulnerabilities exist

=== Basic Connectivity Test ===
Testing Basic connectivity... PASSED - No memory corruption detected
=== Subscription ID Memory Corruption Tests ===
Testing Empty subscription ID... UNCERTAIN - Expected error but got normal response
Testing Very long subscription ID (1KB)... UNCERTAIN - Expected error but got normal response
Testing Very long subscription ID (10KB)... UNCERTAIN - Expected error but got normal response
Testing Subscription ID with null bytes... UNCERTAIN - Expected error but got normal response
Testing Subscription ID with special chars... UNCERTAIN - Expected error but got normal response
Testing Unicode subscription ID... UNCERTAIN - Expected error but got normal response
Testing Subscription ID with path traversal... UNCERTAIN - Expected error but got normal response

=== Filter Array Memory Corruption Tests ===
Testing Too many filters (50)... UNCERTAIN - Expected error but got normal response

=== Concurrent Access Memory Tests ===
Testing Concurrent subscription creation... ["EVENT", "concurrent_1760204917502714788", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
PASSED - Concurrent access handled safely
Testing Concurrent CLOSE operations...
PASSED - Concurrent access handled safely

=== Malformed JSON Memory Tests ===
Testing Unclosed JSON object... UNCERTAIN - Expected error but got normal response
Testing Mismatched brackets... UNCERTAIN - Expected error but got normal response
Testing Extra closing brackets... UNCERTAIN - Expected error but got normal response
Testing Null bytes in JSON... UNCERTAIN - Expected error but got normal response

=== Large Message Memory Tests ===
Testing Very large filter array... UNCERTAIN - Expected error but got normal response
Testing Very long search term... UNCERTAIN - Expected error but got normal response

=== Test Results ===
Total tests: 17
Passed: 17
Failed: 0
✓ All memory corruption tests passed!
The relay appears to handle memory safely.
2025-10-11 13:48:38 - ✓ Memory Corruption Tests PASSED (Duration: 2s)
2025-10-11 13:48:38 - ==========================================
2025-10-11 13:48:38 - Running Test Suite: Input Validation Tests
2025-10-11 13:48:38 - Description: Comprehensive input boundary testing
2025-10-11 13:48:38 - ==========================================
==========================================
C-Relay Input Validation Test Suite
==========================================
Testing against relay at ws://127.0.0.1:8888

=== Basic Connectivity Test ===
Testing Basic connectivity... PASSED - Input accepted correctly

=== Message Type Validation ===
Testing Invalid message type - string... PASSED - Invalid input properly rejected
Testing Invalid message type - number... PASSED - Invalid input properly rejected
Testing Invalid message type - null... PASSED - Invalid input properly rejected
Testing Invalid message type - object... PASSED - Invalid input properly rejected
Testing Empty message type... PASSED - Invalid input properly rejected
Testing Very long message type... PASSED - Invalid input properly rejected

=== Message Structure Validation ===
Testing Too few arguments... PASSED - Invalid input properly rejected
Testing Too many arguments... PASSED - Invalid input properly rejected
Testing Non-array message... PASSED - Invalid input properly rejected
Testing Empty array... PASSED - Invalid input properly rejected
Testing Nested arrays incorrectly... PASSED - Invalid input properly rejected

=== Subscription ID Boundary Tests ===
Testing Valid subscription ID... PASSED - Input accepted correctly
Testing Empty subscription ID... PASSED - Invalid input properly rejected
Testing Subscription ID with spaces... PASSED - Invalid input properly rejected
Testing Subscription ID with newlines... PASSED - Invalid input properly rejected
Testing Subscription ID with tabs... PASSED - Invalid input properly rejected
Testing Subscription ID with control chars... PASSED - Invalid input properly rejected
Testing Unicode subscription ID... PASSED - Invalid input properly rejected
Testing Very long subscription ID... PASSED - Invalid input properly rejected

=== Filter Object Validation ===
Testing Valid empty filter... PASSED - Input accepted correctly
Testing Non-object filter... PASSED - Invalid input properly rejected
Testing Null filter... PASSED - Invalid input properly rejected
Testing Array filter... PASSED - Invalid input properly rejected
Testing Filter with invalid keys... PASSED - Input accepted correctly

=== Authors Field Validation ===
Testing Valid authors array... PASSED - Input accepted correctly
Testing Empty authors array... PASSED - Input accepted correctly
Testing Non-array authors... PASSED - Invalid input properly rejected
Testing Invalid hex in authors... PASSED - Invalid input properly rejected
Testing Short pubkey in authors... PASSED - Invalid input properly rejected

=== IDs Field Validation ===
Testing Valid ids array... PASSED - Input accepted correctly
Testing Empty ids array... PASSED - Input accepted correctly
Testing Non-array ids... PASSED - Invalid input properly rejected

=== Kinds Field Validation ===
Testing Valid kinds array... PASSED - Input accepted correctly
Testing Empty kinds array... PASSED - Input accepted correctly
Testing Non-array kinds... PASSED - Invalid input properly rejected
Testing String in kinds... PASSED - Invalid input properly rejected

=== Timestamp Field Validation ===
Testing Valid since timestamp... PASSED - Input accepted correctly
Testing Valid until timestamp... PASSED - Input accepted correctly
Testing String since timestamp... PASSED - Invalid input properly rejected
Testing Negative timestamp... PASSED - Invalid input properly rejected

=== Limit Field Validation ===
Testing Valid limit... PASSED - Input accepted correctly
Testing Zero limit... PASSED - Input accepted correctly
Testing String limit... PASSED - Invalid input properly rejected
Testing Negative limit... PASSED - Invalid input properly rejected

=== Multiple Filters ===
Testing Two valid filters... PASSED - Input accepted correctly
Testing Many filters... PASSED - Input accepted correctly
=== Test Results ===
Total tests: 47
Passed: 47
Failed: 0
✓ All input validation tests passed!
The relay properly validates input.
2025-10-11 13:48:42 - ✓ Input Validation Tests PASSED (Duration: 4s)
2025-10-11 13:48:42 -
2025-10-11 13:48:42 - === PERFORMANCE TEST SUITES ===
2025-10-11 13:48:42 - ==========================================
2025-10-11 13:48:42 - Running Test Suite: Subscription Limit Tests
2025-10-11 13:48:42 - Description: Subscription limit enforcement testing
2025-10-11 13:48:42 - ==========================================
=== Subscription Limit Test ===
[INFO] Testing relay at: ws://127.0.0.1:8888
[INFO] Note: This test assumes default subscription limits (max 25 per client)

=== Test 1: Basic Connectivity ===
[INFO] Testing basic WebSocket connection...
[PASS] Basic connectivity works

=== Test 2: Subscription Limit Enforcement ===
[INFO] Testing subscription limits by creating multiple subscriptions...
[INFO] Creating multiple subscriptions within a single connection...
[INFO] Hit subscription limit at subscription 26
[PASS] Subscription limit enforcement working (limit hit after 25 subscriptions)

=== Test Complete ===
2025-10-11 13:48:42 - ✓ Subscription Limit Tests PASSED (Duration: 0s)
2025-10-11 13:48:42 - ==========================================
2025-10-11 13:48:42 - Running Test Suite: Load Testing
2025-10-11 13:48:42 - Description: High concurrent connection testing
2025-10-11 13:48:42 - ==========================================
==========================================
C-Relay Load Testing Suite
==========================================
Testing against relay at ws://127.0.0.1:8888

=== Basic Connectivity Test ===
✗ Cannot connect to relay. Aborting tests.
2025-10-11 13:48:47 - ✗ Load Testing FAILED (Duration: 5s)
@@ -1,728 +0,0 @@
2025-10-11 14:11:34 - ==========================================
2025-10-11 14:11:34 - C-Relay Comprehensive Test Suite Runner
2025-10-11 14:11:34 - ==========================================
2025-10-11 14:11:34 - Relay URL: ws://127.0.0.1:8888
2025-10-11 14:11:34 - Log file: test_results_20251011_141134.log
2025-10-11 14:11:34 - Report file: test_report_20251011_141134.html
2025-10-11 14:11:34 -
2025-10-11 14:11:34 - Checking relay status at ws://127.0.0.1:8888...
2025-10-11 14:11:34 - ✓ Relay HTTP endpoint is accessible
2025-10-11 14:11:34 -
2025-10-11 14:11:34 - Starting comprehensive test execution...
2025-10-11 14:11:34 -
2025-10-11 14:11:34 - === SECURITY TEST SUITES ===
2025-10-11 14:11:34 - ==========================================
2025-10-11 14:11:34 - Running Test Suite: SQL Injection Tests
2025-10-11 14:11:34 - Description: Comprehensive SQL injection vulnerability testing
2025-10-11 14:11:34 - ==========================================
==========================================
C-Relay SQL Injection Test Suite
==========================================
Testing against relay at ws://127.0.0.1:8888

=== Basic Connectivity Test ===
Testing Basic connectivity... PASSED - Valid query works

=== Authors Filter SQL Injection Tests ===
Testing Authors filter with payload: '; DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: 1' OR '1'='1... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: 1; SELECT * FROM sqlite_master; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: ' OR 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: admin'--... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: 1' UNION SELECT password FROM users --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: 1' AND SLEEP(5) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: ' UNION SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: ' UNION SELECT 1,2,3 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: '; SELECT * FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: '; DELETE FROM events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: /*... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: */... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: /**/... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: #... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: 0x53514C5F494E4A454354494F4E... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: '))); DROP TABLE events; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: ')) UNION SELECT NULL; --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: ' AND 1=1 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: ' AND 1=2 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: '; EXEC master..xp_cmdshell 'net user' --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
Testing Authors filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... [0;32mPASSED[0m - SQL injection blocked (rejected with error)
|
|
||||||
|
|
||||||
=== IDs Filter SQL Injection Tests ===
Testing IDs filter with payload: '; DROP TABLE events; --... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: 1' OR '1'='1... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: 1; SELECT * FROM sqlite_master; --... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: ' OR 1=1 --... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: admin'--... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: 1' UNION SELECT password FROM users --... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: 1' AND SLEEP(5) --... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: ' UNION SELECT NULL,NULL,NULL --... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: ' UNION SELECT 1,2,3 --... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: '; SELECT * FROM events; --... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: '; DELETE FROM events; --... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: /*... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: */... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: /**/... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: --... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: #... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: 0x53514C5F494E4A454354494F4E... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: '))); DROP TABLE events; --... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: ')) UNION SELECT NULL; --... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: ' AND 1=1 --... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: ' AND 1=2 --... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: '; EXEC master..xp_cmdshell 'net user' --... PASSED - SQL injection blocked (rejected with error)
Testing IDs filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... PASSED - SQL injection blocked (rejected with error)

=== Kinds Filter SQL Injection Tests ===
Testing Kinds filter with string injection... PASSED - SQL injection blocked (rejected with error)
Testing Kinds filter with negative value... PASSED - SQL injection blocked (rejected with error)
Testing Kinds filter with very large value... PASSED - SQL injection blocked (rejected with error)

=== Search Filter SQL Injection Tests ===
Testing Search filter with payload: '; DROP TABLE events; --... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: 1' OR '1'='1... PASSED - SQL injection blocked (query sanitized)
Testing Search filter with payload: 1; SELECT * FROM sqlite_master; --... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: ' OR 1=1 --... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: admin'--... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: 1' UNION SELECT password FROM users --... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: 1' AND SLEEP(5) --... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: ' UNION SELECT NULL,NULL,NULL --... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: ' UNION SELECT 1,2,3 --... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: '; SELECT * FROM events; --... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: '; DELETE FROM events; --... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: /*... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: */... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: /**/... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: --... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: #... PASSED - SQL injection blocked (query sanitized)
Testing Search filter with payload: 0x53514C5F494E4A454354494F4E... PASSED - SQL injection blocked (query sanitized)
Testing Search filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... PASSED - SQL injection blocked (query sanitized)
Testing Search filter with payload: '))); DROP TABLE events; --... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: ')) UNION SELECT NULL; --... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: ' AND 1=1 --... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: ' AND 1=2 --... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: '; EXEC master..xp_cmdshell 'net user' --... PASSED - SQL injection blocked (rejected with error)
Testing Search filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... PASSED - SQL injection blocked (rejected with error)

=== Tag Filter SQL Injection Tests ===
Testing #e tag filter with payload: '; DROP TABLE events; --... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: 1' OR '1'='1... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: 1; SELECT * FROM sqlite_master; --... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: ' OR 1=1 --... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: admin'--... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: 1' UNION SELECT password FROM users --... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: 1' AND SLEEP(5) --... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: ' UNION SELECT NULL,NULL,NULL --... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: ' UNION SELECT 1,2,3 --... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: '; SELECT * FROM events; --... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: '; DELETE FROM events; --... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: /*... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: */... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: /**/... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: --... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: #... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: 0x53514C5F494E4A454354494F4E... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: '))); DROP TABLE events; --... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: ')) UNION SELECT NULL; --... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: ' AND 1=1 --... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: ' AND 1=2 --... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: '; EXEC master..xp_cmdshell 'net user' --... PASSED - SQL injection blocked (query sanitized)
Testing #e tag filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: '; DROP TABLE events; --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: 1' OR '1'='1... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: 1; SELECT * FROM sqlite_master; --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: ' OR 1=1 --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: admin'--... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: 1' UNION SELECT password FROM users --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: 1' AND SLEEP(5) --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: ' UNION SELECT NULL,NULL,NULL --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: ' UNION SELECT 1,2,3 --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: '; SELECT * FROM events; --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: '; DELETE FROM events; --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: /*... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: */... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: /**/... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: #... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: 0x53514C5F494E4A454354494F4E... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: '))); DROP TABLE events; --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: ')) UNION SELECT NULL; --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: ' AND 1=1 --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: ' AND 1=2 --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: '; EXEC master..xp_cmdshell 'net user' --... PASSED - SQL injection blocked (query sanitized)
Testing #p tag filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: '; DROP TABLE events; --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: 1' OR '1'='1... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: 1; SELECT * FROM sqlite_master; --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: ' OR 1=1 --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: admin'--... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: 1' UNION SELECT password FROM users --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: 1' AND SLEEP(5) --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: ' UNION SELECT NULL,NULL,NULL --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: ' UNION SELECT 1,2,3 --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: '; SELECT * FROM events; --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: '; DELETE FROM events; --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: /*... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: */... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: /**/... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: #... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: 0x53514C5F494E4A454354494F4E... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: '))); DROP TABLE events; --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: ')) UNION SELECT NULL; --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: ' AND 1=1 --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: ' AND 1=2 --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: '; EXEC master..xp_cmdshell 'net user' --... PASSED - SQL injection blocked (query sanitized)
Testing #t tag filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: '; DROP TABLE events; --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: 1' OR '1'='1... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: 1; SELECT * FROM sqlite_master; --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: ' OR 1=1 --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: admin'--... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: 1' UNION SELECT password FROM users --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: 1' AND SLEEP(5) --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: ' UNION SELECT NULL,NULL,NULL --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: ' UNION SELECT 1,2,3 --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: '; SELECT * FROM events; --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: '; DELETE FROM events; --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: /*... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: */... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: /**/... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: #... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: 0x53514C5F494E4A454354494F4E... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: '))); DROP TABLE events; --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: ')) UNION SELECT NULL; --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: ' AND 1=1 --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: ' AND 1=2 --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: '; EXEC master..xp_cmdshell 'net user' --... PASSED - SQL injection blocked (query sanitized)
Testing #r tag filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: '; DROP TABLE events; --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: 1' OR '1'='1... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: 1; SELECT * FROM sqlite_master; --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: ' OR 1=1 --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: admin'--... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: 1' UNION SELECT password FROM users --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: 1' AND SLEEP(5) --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: 1' AND 1=CAST((SELECT version()) AS INT) --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: ' UNION SELECT NULL,NULL,NULL --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: ' UNION SELECT 1,2,3 --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: ' UNION ALL SELECT NULL,NULL,NULL --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: '; SELECT * FROM events; --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: '; DELETE FROM events; --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: /*... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: */... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: /**/... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: #... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: 0x53514C5F494E4A454354494F4E... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: J1NSTCBJTkpFQ1RJT04gLS0=... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: '))); DROP TABLE events; --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: ')) UNION SELECT NULL; --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: ' AND 1=1 --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: ' AND 1=2 --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: '; EXEC master..xp_cmdshell 'net user' --... PASSED - SQL injection blocked (query sanitized)
Testing #d tag filter with payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... PASSED - SQL injection blocked (query sanitized)

=== Timestamp Filter SQL Injection Tests ===
Testing Since parameter injection... PASSED - SQL injection blocked (rejected with error)
Testing Until parameter injection... PASSED - SQL injection blocked (rejected with error)

=== Limit Parameter SQL Injection Tests ===
Testing Limit parameter injection... PASSED - SQL injection blocked (rejected with error)
Testing Limit with UNION... PASSED - SQL injection blocked (rejected with error)

=== Complex Multi-Filter SQL Injection Tests ===
Testing Multi-filter with authors injection... PASSED - SQL injection blocked (rejected with error)
Testing Multi-filter with search injection... PASSED - SQL injection blocked (rejected with error)
Testing Multi-filter with tag injection... PASSED - SQL injection blocked (query sanitized)

=== COUNT Message SQL Injection Tests ===
Testing COUNT with authors payload: '; DROP TABLE events; --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: '; DROP TABLE events; --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: 1' OR '1'='1... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: 1' OR '1'='1... PASSED - SQL injection blocked (query sanitized)
Testing COUNT with authors payload: 1; SELECT * FROM sqlite_master; --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: 1; SELECT * FROM sqlite_master; --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: ' OR 1=1 --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: ' OR 1=1 --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: admin'--... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: admin'--... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: 1' UNION SELECT password FROM users --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: 1' UNION SELECT password FROM users --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: 1' AND SLEEP(5) --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: 1' AND SLEEP(5) --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: 1' AND 1=1 WAITFOR DELAY '0:0:5' --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: 1' AND 1=CAST((SELECT version()) AS INT) --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: 1' AND 1=CAST((SELECT version()) AS INT) --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: 1' AND 1=CONVERT(INT, (SELECT @@VERSION)) --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: ' UNION SELECT NULL,NULL,NULL --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: ' UNION SELECT NULL,NULL,NULL --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: ' UNION SELECT 1,2,3 --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: ' UNION SELECT 1,2,3 --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: ' UNION ALL SELECT NULL,NULL,NULL --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: ' UNION ALL SELECT NULL,NULL,NULL --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: '; SELECT * FROM events; --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: '; SELECT * FROM events; --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: '; DELETE FROM events; --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: '; DELETE FROM events; --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: '; UPDATE events SET content='hacked' WHERE 1=1; --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: /*... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: /*... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: */... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: */... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: /**/... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: /**/... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: #... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: #... PASSED - SQL injection blocked (query sanitized)
Testing COUNT with authors payload: 0x53514C5F494E4A454354494F4E... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: 0x53514C5F494E4A454354494F4E... PASSED - SQL injection blocked (query sanitized)
Testing COUNT with authors payload: J1NSTCBJTkpFQ1RJT04gLS0=... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: J1NSTCBJTkpFQ1RJT04gLS0=... PASSED - SQL injection blocked (query sanitized)
Testing COUNT with authors payload: '))); DROP TABLE events; --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: '))); DROP TABLE events; --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: ')) UNION SELECT NULL; --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: ')) UNION SELECT NULL; --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: ' AND 1=1 --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: ' AND 1=1 --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: ' AND 1=2 --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: ' AND 1=2 --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: ' AND (SELECT COUNT(*) FROM events) > 0 --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: '; EXEC master..xp_cmdshell 'net user' --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: '; EXEC master..xp_cmdshell 'net user' --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with authors payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... PASSED - SQL injection blocked (rejected with error)
Testing COUNT with search payload: '; DECLARE @host varchar(1024); SELECT @host=(SELECT TOP 1 master..sys.fn_varbintohexstr(password_hash) FROM sys.sql_logins WHERE name='sa'); --... PASSED - SQL injection blocked (rejected with error)

=== Edge Case SQL Injection Tests ===
Testing Empty string injection... PASSED - SQL injection blocked (rejected with error)
Testing Null byte injection... PASSED - SQL injection blocked (silently rejected)
Testing Unicode injection... PASSED - SQL injection blocked (rejected with error)
Testing Very long injection payload... PASSED - SQL injection blocked (rejected with error)

=== Subscription ID SQL Injection Tests ===
Testing Subscription ID injection... PASSED - SQL injection blocked (rejected with error)
Testing Subscription ID with quotes... PASSED - SQL injection blocked (silently rejected)

=== CLOSE Message SQL Injection Tests ===
Testing CLOSE with injection... PASSED - SQL injection blocked (rejected with error)

=== Test Results ===
Total tests: 318
Passed: 318
Failed: 0
✓ All SQL injection tests passed!
The relay appears to be protected against SQL injection attacks.
2025-10-11 14:11:56 - ✓ SQL Injection Tests PASSED (Duration: 22s)
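The 318 passes above fall into two outcomes: malformed filter values that fail validation outright ("rejected with error") and free-text fields that are accepted but treated as plain data ("query sanitized"). A minimal sketch of the second behaviour, assuming an SQLite-backed `events` table and parameter binding; this is illustrative only and not taken from C-Relay's actual source:

```c
/* Illustrative sketch only -- not C-Relay's query code.
 * A filter value is bound as a parameter, never spliced into SQL text,
 * so a payload like "'; DROP TABLE events; --" stays ordinary data. */
#include <sqlite3.h>
#include <stdio.h>

static int count_events_by_author(sqlite3 *db, const char *author_hex) {
    sqlite3_stmt *stmt = NULL;
    int count = -1;
    const char *sql = "SELECT COUNT(*) FROM events WHERE pubkey = ?1;";

    if (sqlite3_prepare_v2(db, sql, -1, &stmt, NULL) != SQLITE_OK)
        return -1;
    /* Bound as TEXT; SQLite never parses the payload as SQL. */
    sqlite3_bind_text(stmt, 1, author_hex, -1, SQLITE_TRANSIENT);
    if (sqlite3_step(stmt) == SQLITE_ROW)
        count = sqlite3_column_int(stmt, 0);
    sqlite3_finalize(stmt);
    return count;
}

int main(void) {
    sqlite3 *db = NULL;
    if (sqlite3_open(":memory:", &db) != SQLITE_OK) return 1;
    sqlite3_exec(db, "CREATE TABLE events (pubkey TEXT, content TEXT);",
                 NULL, NULL, NULL);
    /* An injection payload from the log simply matches no pubkey. */
    printf("%d\n", count_events_by_author(db, "'; DROP TABLE events; --"));
    sqlite3_close(db);
    return 0;
}
```

Because the value is bound rather than concatenated into the SQL string, the worst an attacker-controlled filter can do is fail to match anything.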
2025-10-11 14:11:56 - ==========================================
2025-10-11 14:11:56 - Running Test Suite: Filter Validation Tests
2025-10-11 14:11:56 - Description: Input validation for REQ and COUNT messages
2025-10-11 14:11:56 - ==========================================
=== C-Relay Filter Validation Tests ===
Testing against relay at ws://127.0.0.1:8888

Testing Valid REQ message... PASSED
Testing Valid COUNT message... PASSED

=== Testing Filter Array Validation ===
Testing Non-object filter... PASSED
Testing Too many filters... PASSED

=== Testing Authors Validation ===
Testing Invalid author type... PASSED
Testing Invalid author hex... PASSED
Testing Too many authors... PASSED

=== Testing IDs Validation ===
Testing Invalid ID type... PASSED
Testing Invalid ID hex... PASSED
Testing Too many IDs... PASSED

=== Testing Kinds Validation ===
Testing Invalid kind type... PASSED
Testing Negative kind... PASSED
Testing Too large kind... PASSED
Testing Too many kinds... PASSED

=== Testing Timestamp Validation ===
Testing Invalid since type... PASSED
Testing Negative since... PASSED
Testing Invalid until type... PASSED
Testing Negative until... PASSED

=== Testing Limit Validation ===
Testing Invalid limit type... PASSED
Testing Negative limit... PASSED
Testing Too large limit... PASSED

=== Testing Search Validation ===
Testing Invalid search type... PASSED
Testing Search too long... PASSED
Testing Search SQL injection... PASSED

=== Testing Tag Filter Validation ===
Testing Invalid tag filter type... PASSED
Testing Too many tag values... PASSED
Testing Tag value too long... PASSED

=== Testing Rate Limiting ===
Testing rate limiting with malformed requests... UNCERTAIN - Rate limiting may not have triggered (this could be normal)

=== Test Results ===
Total tests: 28
Passed: 28
Failed: 0
All tests passed!
2025-10-11 14:12:02 - ✓ Filter Validation Tests PASSED (Duration: 6s)
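The suite above rejects out-of-range kinds, malformed hex pubkeys, bad timestamps, and oversized limits before any query is built. A hypothetical sketch of such bounds checks; the exact limits (kind range, limit cap) are assumptions for illustration, not values read from the relay source:

```c
/* Hypothetical filter bounds checks -- limits are assumed, not C-Relay's. */
#include <ctype.h>
#include <string.h>
#include <stdbool.h>

static bool valid_kind(long kind)   { return kind >= 0 && kind <= 65535; }
static bool valid_limit(long limit) { return limit >= 0 && limit <= 5000; }

/* An author entry must be a 64-character hex string (32-byte pubkey). */
static bool valid_author(const char *hex) {
    if (hex == NULL || strlen(hex) != 64) return false;
    for (size_t i = 0; i < 64; i++)
        if (!isxdigit((unsigned char)hex[i])) return false;
    return true;
}
```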
2025-10-11 14:12:02 - ==========================================
2025-10-11 14:12:02 - Running Test Suite: Subscription Validation Tests
2025-10-11 14:12:02 - Description: Subscription ID and message validation
2025-10-11 14:12:02 - ==========================================
Testing subscription ID validation fixes...
Testing malformed subscription IDs...
Valid ID test: Success
Testing CLOSE message validation...
CLOSE valid ID test: Success
Subscription validation tests completed.
2025-10-11 14:12:02 - ✓ Subscription Validation Tests PASSED (Duration: 0s)
2025-10-11 14:12:02 - ==========================================
2025-10-11 14:12:02 - Running Test Suite: Memory Corruption Tests
2025-10-11 14:12:02 - Description: Buffer overflow and memory safety testing
2025-10-11 14:12:02 - ==========================================
==========================================
C-Relay Memory Corruption Test Suite
==========================================
Testing against relay at ws://127.0.0.1:8888
Note: These tests may cause the relay to crash if vulnerabilities exist

=== Basic Connectivity Test ===
Testing Basic connectivity... PASSED - No memory corruption detected

=== Subscription ID Memory Corruption Tests ===
Testing Empty subscription ID... UNCERTAIN - Expected error but got normal response
Testing Very long subscription ID (1KB)... UNCERTAIN - Expected error but got normal response
Testing Very long subscription ID (10KB)... UNCERTAIN - Expected error but got normal response
Testing Subscription ID with null bytes... UNCERTAIN - Expected error but got normal response
Testing Subscription ID with special chars... UNCERTAIN - Expected error but got normal response
Testing Unicode subscription ID... UNCERTAIN - Expected error but got normal response
Testing Subscription ID with path traversal... UNCERTAIN - Expected error but got normal response

=== Filter Array Memory Corruption Tests ===
Testing Too many filters (50)... UNCERTAIN - Expected error but got normal response

=== Concurrent Access Memory Tests ===
Testing Concurrent subscription creation... ["EVENT", "concurrent_1760206323991056473", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
["EVENT", "concurrent_1760206323991056473", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
["EVENT", "concurrent_1760206323991056473", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
["EVENT", "concurrent_1760206323991056473", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
["EVENT", "concurrent_1760206323991056473", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
["EVENT", "concurrent_1760206323991056473", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
["EVENT", "concurrent_1760206323991056473", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
["EVENT", "concurrent_1760206323991056473", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
["EVENT", "concurrent_1760206323991056473", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
["EVENT", "concurrent_1760206323991056473", { "id": "b3a2a79b768c304a8ad315a97319e3c6fd9d521844fc9f1e4228c75c453dd882", "pubkey": "aa4fc8665f5696e33db7e1a572e3b0f5b3d615837b0f362dcb1c8068b098c7b4", "created_at": 1760196143, "kind": 30001, "content": "Updated addressable event", "sig": "795671a831de31fbbdd6282585529f274f61bb6e8c974e597560d70989355f24c8ecfe70caf043e8fbc24ce65d9b0d562297c682af958cfcdd2ee137dd9bccb4", "tags": [["d", "test-article"], ["type", "addressable"], ["updated", "true"]] }]
PASSED - Concurrent access handled safely
Testing Concurrent CLOSE operations...
PASSED - Concurrent access handled safely

=== Malformed JSON Memory Tests ===
Testing Unclosed JSON object... UNCERTAIN - Expected error but got normal response
Testing Mismatched brackets... UNCERTAIN - Expected error but got normal response
Testing Extra closing brackets... UNCERTAIN - Expected error but got normal response
Testing Null bytes in JSON... UNCERTAIN - Expected error but got normal response

=== Large Message Memory Tests ===
Testing Very large filter array... UNCERTAIN - Expected error but got normal response
Testing Very long search term... UNCERTAIN - Expected error but got normal response

=== Test Results ===
Total tests: 17
Passed: 17
Failed: 0
✓ All memory corruption tests passed!
The relay appears to handle memory safely.
2025-10-11 14:12:05 - ✓ Memory Corruption Tests PASSED (Duration: 3s)
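The oversized and null-byte subscription IDs probed above are the classic trigger for fixed-buffer overflows. A minimal sketch of a bounded copy with explicit length and null-byte checks, assuming a 64-byte cap purely for illustration (not the relay's actual limit or code):

```c
/* Illustrative bounded subscription-ID copy; the 64-byte cap is assumed. */
#include <string.h>
#include <stdbool.h>

#define MAX_SUB_ID_LEN 64

static bool store_sub_id(char dest[MAX_SUB_ID_LEN + 1],
                         const char *id, size_t id_len) {
    if (id == NULL || id_len == 0 || id_len > MAX_SUB_ID_LEN)
        return false;                     /* reject empty or oversized IDs */
    if (memchr(id, '\0', id_len) != NULL)
        return false;                     /* reject embedded null bytes */
    memcpy(dest, id, id_len);
    dest[id_len] = '\0';
    return true;
}
```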
2025-10-11 14:12:05 - ==========================================
|
|
||||||
2025-10-11 14:12:05 - Running Test Suite: Input Validation Tests
|
|
||||||
2025-10-11 14:12:05 - Description: Comprehensive input boundary testing
|
|
||||||
2025-10-11 14:12:05 - ==========================================
|
|
||||||
==========================================
|
|
||||||
C-Relay Input Validation Test Suite
|
|
||||||
==========================================
|
|
||||||
Testing against relay at ws://127.0.0.1:8888
|
|
||||||
|
|
||||||
=== Basic Connectivity Test ===
|
|
||||||
Testing Basic connectivity... [0;32mPASSED[0m - Input accepted correctly
|
|
||||||
|
|
||||||
=== Message Type Validation ===
|
|
||||||
Testing Invalid message type - string... [0;32mPASSED[0m - Invalid input properly rejected
|
|
||||||
Testing Invalid message type - number... [0;32mPASSED[0m - Invalid input properly rejected
|
|
||||||
Testing Invalid message type - null... [0;32mPASSED[0m - Invalid input properly rejected
|
|
||||||
Testing Invalid message type - object... [0;32mPASSED[0m - Invalid input properly rejected
|
|
||||||
Testing Empty message type... [0;32mPASSED[0m - Invalid input properly rejected
|
|
||||||
Testing Very long message type... [0;32mPASSED[0m - Invalid input properly rejected
|
|
||||||
|
|
||||||
=== Message Structure Validation ===
|
|
||||||
Testing Too few arguments... [0;32mPASSED[0m - Invalid input properly rejected
|
|
||||||
Testing Too many arguments... [0;32mPASSED[0m - Invalid input properly rejected
|
|
||||||
Testing Non-array message... [0;32mPASSED[0m - Invalid input properly rejected
|
|
||||||
Testing Empty array... [0;32mPASSED[0m - Invalid input properly rejected
|
|
||||||
Testing Nested arrays incorrectly... [0;32mPASSED[0m - Invalid input properly rejected
|
|
||||||
|
|
||||||
=== Subscription ID Boundary Tests ===
Testing Valid subscription ID... PASSED - Input accepted correctly
Testing Empty subscription ID... PASSED - Invalid input properly rejected
Testing Subscription ID with spaces... PASSED - Invalid input properly rejected
Testing Subscription ID with newlines... PASSED - Invalid input properly rejected
Testing Subscription ID with tabs... PASSED - Invalid input properly rejected
Testing Subscription ID with control chars... PASSED - Invalid input properly rejected
Testing Unicode subscription ID... PASSED - Invalid input properly rejected
Testing Very long subscription ID... PASSED - Invalid input properly rejected

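The pattern of rejections above suggests the relay restricts subscription IDs to short, printable ASCII strings with no whitespace. A minimal sketch of such a check, assuming a 64-character cap; the function name and the exact limit are illustrative, not taken from the relay's source:

    #include <ctype.h>
    #include <string.h>

    /* Hypothetical check mirroring the observed behavior: non-empty,
       at most 64 bytes, printable ASCII only (no spaces, tabs, newlines,
       control characters, or non-ASCII bytes). */
    static int subscription_id_is_valid(const char *id)
    {
        size_t len = id ? strlen(id) : 0;
        if (len == 0 || len > 64)
            return 0;
        for (size_t i = 0; i < len; i++) {
            unsigned char c = (unsigned char)id[i];
            if (c > 0x7E || !isgraph(c))  /* rejects space, controls, UTF-8 bytes */
                return 0;
        }
        return 1;
    }
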
=== Filter Object Validation ===
Testing Valid empty filter... PASSED - Input accepted correctly
Testing Non-object filter... PASSED - Invalid input properly rejected
Testing Null filter... PASSED - Invalid input properly rejected
Testing Array filter... PASSED - Invalid input properly rejected
Testing Filter with invalid keys... PASSED - Input accepted correctly

=== Authors Field Validation ===
Testing Valid authors array... PASSED - Input accepted correctly
Testing Empty authors array... PASSED - Input accepted correctly
Testing Non-array authors... PASSED - Invalid input properly rejected
Testing Invalid hex in authors... PASSED - Invalid input properly rejected
Testing Short pubkey in authors... PASSED - Invalid input properly rejected

=== IDs Field Validation ===
Testing Valid ids array... PASSED - Input accepted correctly
Testing Empty ids array... PASSED - Input accepted correctly
Testing Non-array ids... PASSED - Invalid input properly rejected

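Both the authors and ids checks behave as if each entry must be a 64-character lowercase hex string (a 32-byte key or event id). A small sketch of what such a field validator might look like, under that assumption; the helper name is illustrative:

    #include <stddef.h>

    /* Hypothetical validator: accepts exactly 64 lowercase hex characters. */
    static int is_valid_hex64(const char *s)
    {
        size_t i;
        for (i = 0; s[i] != '\0'; i++) {
            char c = s[i];
            int is_hex = (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f');
            if (!is_hex)
                return 0;      /* "Invalid hex in authors" falls out here */
        }
        return i == 64;        /* "Short pubkey in authors" falls out here */
    }
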
=== Kinds Field Validation ===
Testing Valid kinds array... PASSED - Input accepted correctly
Testing Empty kinds array... PASSED - Input accepted correctly
Testing Non-array kinds... PASSED - Invalid input properly rejected
Testing String in kinds... PASSED - Invalid input properly rejected

=== Timestamp Field Validation ===
Testing Valid since timestamp... PASSED - Input accepted correctly
Testing Valid until timestamp... PASSED - Input accepted correctly
Testing String since timestamp... PASSED - Invalid input properly rejected
Testing Negative timestamp... PASSED - Invalid input properly rejected

=== Limit Field Validation ===
Testing Valid limit... PASSED - Input accepted correctly
Testing Zero limit... PASSED - Input accepted correctly
Testing String limit... PASSED - Invalid input properly rejected
Testing Negative limit... PASSED - Invalid input properly rejected

=== Multiple Filters ===
Testing Two valid filters... PASSED - Input accepted correctly
Testing Many filters... PASSED - Input accepted correctly

=== Test Results ===
Total tests: 47
Passed: 47
Failed: 0
✓ All input validation tests passed!
The relay properly validates input.

2025-10-11 14:12:08 - ✓ Input Validation Tests PASSED (Duration: 3s)
2025-10-11 14:12:08 -
2025-10-11 14:12:08 - === PERFORMANCE TEST SUITES ===
2025-10-11 14:12:08 - ==========================================
2025-10-11 14:12:08 - Running Test Suite: Subscription Limit Tests
2025-10-11 14:12:08 - Description: Subscription limit enforcement testing
2025-10-11 14:12:08 - ==========================================
=== Subscription Limit Test ===
[INFO] Testing relay at: ws://127.0.0.1:8888
[INFO] Note: This test assumes default subscription limits (max 25 per client)

=== Test 1: Basic Connectivity ===
[INFO] Testing basic WebSocket connection...
[PASS] Basic connectivity works

=== Test 2: Subscription Limit Enforcement ===
[INFO] Testing subscription limits by creating multiple subscriptions...
[INFO] Creating multiple subscriptions within a single connection...
[INFO] Hit subscription limit at subscription 26
[PASS] Subscription limit enforcement working (limit hit after 25 subscriptions)

=== Test Complete ===
2025-10-11 14:12:09 - ✓ Subscription Limit Tests PASSED (Duration: 1s)

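The limit trips on the 26th REQ over a single connection, consistent with a per-client cap of 25 active subscriptions. A minimal sketch of how such a cap is typically enforced on the relay side; the struct and function names are illustrative, not the relay's actual internals:

    #include <stdio.h>

    #define MAX_SUBSCRIPTIONS_PER_CLIENT 25  /* assumed default, per the test note above */

    struct client_state {
        int active_subscriptions;
    };

    /* Returns 1 if the REQ can be accepted, 0 if the client is at its cap.
       A real relay would answer a rejected REQ with a CLOSED message. */
    static int try_open_subscription(struct client_state *c, const char *sub_id)
    {
        if (c->active_subscriptions >= MAX_SUBSCRIPTIONS_PER_CLIENT) {
            fprintf(stderr, "rejecting subscription %s: limit reached\n", sub_id);
            return 0;
        }
        c->active_subscriptions++;
        return 1;
    }
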
2025-10-11 14:12:09 - ==========================================
2025-10-11 14:12:09 - Running Test Suite: Load Testing
2025-10-11 14:12:09 - Description: High concurrent connection testing
2025-10-11 14:12:09 - ==========================================
==========================================
C-Relay Load Testing Suite
==========================================
Testing against relay at ws://127.0.0.1:8888

=== Basic Connectivity Test ===
✓ Relay is accessible

==========================================
Load Test: Light Load Test
Description: Basic load test with moderate concurrent connections
Concurrent clients: 10
Messages per client: 5
==========================================
Launching 10 clients...
All clients completed. Processing results...

=== Load Test Results ===
Test duration: 1s
Total connections attempted: 10
Successful connections: 10
Failed connections: 0
Connection success rate: 100%
Messages expected: 50
Messages sent: 50
Messages received: 260
✓ EXCELLENT: High connection success rate

Checking relay responsiveness... ✓ Relay is still responsive

==========================================
Load Test: Medium Load Test
Description: Moderate load test with higher concurrency
Concurrent clients: 25
Messages per client: 10
==========================================
Launching 25 clients...
All clients completed. Processing results...

=== Load Test Results ===
Test duration: 3s
Total connections attempted: 35
Successful connections: 25
Failed connections: 0
Connection success rate: 71%
Messages expected: 250
Messages sent: 250
Messages received: 1275
✗ POOR: Low connection success rate

Checking relay responsiveness... ✓ Relay is still responsive

==========================================
Load Test: Heavy Load Test
Description: Heavy load test with high concurrency
Concurrent clients: 50
Messages per client: 20
==========================================
Launching 50 clients...
All clients completed. Processing results...

=== Load Test Results ===
Test duration: 13s
Total connections attempted: 85
Successful connections: 50
Failed connections: 0
Connection success rate: 58%
Messages expected: 1000
Messages sent: 1000
Messages received: 5050
✗ POOR: Low connection success rate

Checking relay responsiveness... ✓ Relay is still responsive

==========================================
Load Test: Stress Test
Description: Maximum load test to find breaking point
Concurrent clients: 100
Messages per client: 50
==========================================
Launching 100 clients...
All clients completed. Processing results...

=== Load Test Results ===
Test duration: 63s
Total connections attempted: 185
Successful connections: 100
Failed connections: 0
Connection success rate: 54%
Messages expected: 5000
Messages sent: 5000
Messages received: 15100
✗ POOR: Low connection success rate

Checking relay responsiveness... ✓ Relay is still responsive

==========================================
Load Testing Complete
==========================================
All load tests completed. Check individual test results above.
If any tests failed, the relay may need optimization or have resource limits.
2025-10-11 14:13:31 - ✓ Load Testing PASSED (Duration: 82s)

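One detail worth noting when reading the load results: every run reports 0 failed connections, yet the later runs are graded POOR. The percentages match the per-run successful-connection count divided by a connection counter that appears to accumulate across runs (25/35 ≈ 71%, 50/85 ≈ 58%, 100/185 ≈ 54%), so the low ratings likely reflect the cumulative counter in the test harness rather than dropped connections.
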
2025-10-11 14:13:31 - ==========================================
2025-10-11 14:13:31 - Running Test Suite: Stress Testing
2025-10-11 14:13:31 - Description: Resource usage and stability testing
2025-10-11 14:13:31 - ==========================================
2025-10-11 14:13:31 - ERROR: Test script stress_tests.sh not found

Submodule text_graph added at bf1785f372